Lines Matching refs:xhci

39 static struct xhci_segment *xhci_segment_alloc(struct xhci_hcd *xhci,  in xhci_segment_alloc()  argument
50 seg->trbs = dma_pool_alloc(xhci->segment_pool, flags, &dma); in xhci_segment_alloc()
68 static void xhci_segment_free(struct xhci_hcd *xhci, struct xhci_segment *seg) in xhci_segment_free() argument
71 dma_pool_free(xhci->segment_pool, seg->trbs, seg->dma); in xhci_segment_free()
77 static void xhci_free_segments_for_ring(struct xhci_hcd *xhci, in xhci_free_segments_for_ring() argument
85 xhci_segment_free(xhci, seg); in xhci_free_segments_for_ring()
88 xhci_segment_free(xhci, first); in xhci_free_segments_for_ring()
98 static void xhci_link_segments(struct xhci_hcd *xhci, struct xhci_segment *prev, in xhci_link_segments() argument
116 if (xhci_link_trb_quirk(xhci) || in xhci_link_segments()
118 (xhci->quirks & XHCI_AMD_0x96_HOST))) in xhci_link_segments()
128 static void xhci_link_rings(struct xhci_hcd *xhci, struct xhci_ring *ring, in xhci_link_rings() argument
138 xhci_link_segments(xhci, ring->enq_seg, first, ring->type); in xhci_link_rings()
139 xhci_link_segments(xhci, last, next, ring->type); in xhci_link_rings()
276 void xhci_ring_free(struct xhci_hcd *xhci, struct xhci_ring *ring) in xhci_ring_free() argument
284 xhci_free_segments_for_ring(xhci, ring->first_seg); in xhci_ring_free()
318 static int xhci_alloc_segments_for_ring(struct xhci_hcd *xhci, in xhci_alloc_segments_for_ring() argument
325 prev = xhci_segment_alloc(xhci, cycle_state, flags); in xhci_alloc_segments_for_ring()
334 next = xhci_segment_alloc(xhci, cycle_state, flags); in xhci_alloc_segments_for_ring()
339 xhci_segment_free(xhci, prev); in xhci_alloc_segments_for_ring()
344 xhci_link_segments(xhci, prev, next, type); in xhci_alloc_segments_for_ring()
349 xhci_link_segments(xhci, prev, *first, type); in xhci_alloc_segments_for_ring()
362 static struct xhci_ring *xhci_ring_alloc(struct xhci_hcd *xhci, in xhci_ring_alloc() argument
379 ret = xhci_alloc_segments_for_ring(xhci, &ring->first_seg, in xhci_ring_alloc()
398 void xhci_free_or_cache_endpoint_ring(struct xhci_hcd *xhci, in xhci_free_or_cache_endpoint_ring() argument
409 xhci_dbg(xhci, "Cached old ring, " in xhci_free_or_cache_endpoint_ring()
414 xhci_ring_free(xhci, virt_dev->eps[ep_index].ring); in xhci_free_or_cache_endpoint_ring()
415 xhci_dbg(xhci, "Ring cache full (%d rings), " in xhci_free_or_cache_endpoint_ring()
425 static void xhci_reinit_cached_ring(struct xhci_hcd *xhci, in xhci_reinit_cached_ring() argument
441 xhci_link_segments(xhci, seg, seg->next, type); in xhci_reinit_cached_ring()
457 int xhci_ring_expansion(struct xhci_hcd *xhci, struct xhci_ring *ring, in xhci_ring_expansion() argument
473 ret = xhci_alloc_segments_for_ring(xhci, &first, &last, in xhci_ring_expansion()
485 xhci_segment_free(xhci, first); in xhci_ring_expansion()
493 xhci_link_rings(xhci, ring, first, last, num_segs); in xhci_ring_expansion()
494 xhci_dbg_trace(xhci, trace_xhci_dbg_ring_expansion, in xhci_ring_expansion()
503 static struct xhci_container_ctx *xhci_alloc_container_ctx(struct xhci_hcd *xhci, in xhci_alloc_container_ctx() argument
516 ctx->size = HCC_64BYTE_CONTEXT(xhci->hcc_params) ? 2048 : 1024; in xhci_alloc_container_ctx()
518 ctx->size += CTX_SIZE(xhci->hcc_params); in xhci_alloc_container_ctx()
520 ctx->bytes = dma_pool_alloc(xhci->device_pool, flags, &ctx->dma); in xhci_alloc_container_ctx()
529 static void xhci_free_container_ctx(struct xhci_hcd *xhci, in xhci_free_container_ctx() argument
534 dma_pool_free(xhci->device_pool, ctx->bytes, ctx->dma); in xhci_free_container_ctx()
547 struct xhci_slot_ctx *xhci_get_slot_ctx(struct xhci_hcd *xhci, in xhci_get_slot_ctx() argument
554 (ctx->bytes + CTX_SIZE(xhci->hcc_params)); in xhci_get_slot_ctx()
557 struct xhci_ep_ctx *xhci_get_ep_ctx(struct xhci_hcd *xhci, in xhci_get_ep_ctx() argument
567 (ctx->bytes + (ep_index * CTX_SIZE(xhci->hcc_params))); in xhci_get_ep_ctx()
573 static void xhci_free_stream_ctx(struct xhci_hcd *xhci, in xhci_free_stream_ctx() argument
577 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_free_stream_ctx()
584 return dma_pool_free(xhci->small_streams_pool, in xhci_free_stream_ctx()
587 return dma_pool_free(xhci->medium_streams_pool, in xhci_free_stream_ctx()
601 static struct xhci_stream_ctx *xhci_alloc_stream_ctx(struct xhci_hcd *xhci, in xhci_alloc_stream_ctx() argument
605 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_alloc_stream_ctx()
612 return dma_pool_alloc(xhci->small_streams_pool, in xhci_alloc_stream_ctx()
615 return dma_pool_alloc(xhci->medium_streams_pool, in xhci_alloc_stream_ctx()
655 struct xhci_stream_info *xhci_alloc_stream_info(struct xhci_hcd *xhci, in xhci_alloc_stream_info() argument
665 xhci_dbg(xhci, "Allocating %u streams and %u " in xhci_alloc_stream_info()
668 if (xhci->cmd_ring_reserved_trbs == MAX_RSVD_CMD_TRBS) { in xhci_alloc_stream_info()
669 xhci_dbg(xhci, "Command ring has no reserved TRBs available\n"); in xhci_alloc_stream_info()
672 xhci->cmd_ring_reserved_trbs++; in xhci_alloc_stream_info()
689 stream_info->stream_ctx_array = xhci_alloc_stream_ctx(xhci, in xhci_alloc_stream_info()
699 xhci_alloc_command(xhci, true, true, mem_flags); in xhci_alloc_stream_info()
711 xhci_ring_alloc(xhci, 2, 1, TYPE_STREAM, mem_flags); in xhci_alloc_stream_info()
723 xhci_dbg(xhci, "Setting stream %d ring ptr to 0x%08llx\n", in xhci_alloc_stream_info()
728 xhci_ring_free(xhci, cur_ring); in xhci_alloc_stream_info()
746 xhci_ring_free(xhci, cur_ring); in xhci_alloc_stream_info()
750 xhci_free_command(xhci, stream_info->free_streams_command); in xhci_alloc_stream_info()
756 xhci->cmd_ring_reserved_trbs--; in xhci_alloc_stream_info()
763 void xhci_setup_streams_ep_input_ctx(struct xhci_hcd *xhci, in xhci_setup_streams_ep_input_ctx() argument
773 xhci_dbg_trace(xhci, trace_xhci_dbg_context_change, in xhci_setup_streams_ep_input_ctx()
800 void xhci_free_stream_info(struct xhci_hcd *xhci, in xhci_free_stream_info() argument
813 xhci_ring_free(xhci, cur_ring); in xhci_free_stream_info()
817 xhci_free_command(xhci, stream_info->free_streams_command); in xhci_free_stream_info()
818 xhci->cmd_ring_reserved_trbs--; in xhci_free_stream_info()
820 xhci_free_stream_ctx(xhci, in xhci_free_stream_info()
832 static void xhci_init_endpoint_timer(struct xhci_hcd *xhci, in xhci_init_endpoint_timer() argument
837 ep->xhci = xhci; in xhci_init_endpoint_timer()
840 static void xhci_free_tt_info(struct xhci_hcd *xhci, in xhci_free_tt_info() argument
852 virt_dev->real_port > HCS_MAX_PORTS(xhci->hcs_params1)) { in xhci_free_tt_info()
853 xhci_dbg(xhci, "Bad real port.\n"); in xhci_free_tt_info()
857 tt_list_head = &(xhci->rh_bw[virt_dev->real_port - 1].tts); in xhci_free_tt_info()
870 int xhci_alloc_tt_info(struct xhci_hcd *xhci, in xhci_alloc_tt_info() argument
892 &xhci->rh_bw[virt_dev->real_port - 1].tts); in xhci_alloc_tt_info()
903 xhci_free_tt_info(xhci, virt_dev, virt_dev->udev->slot_id); in xhci_alloc_tt_info()
913 void xhci_free_virt_device(struct xhci_hcd *xhci, int slot_id) in xhci_free_virt_device() argument
920 if (slot_id == 0 || !xhci->devs[slot_id]) in xhci_free_virt_device()
923 dev = xhci->devs[slot_id]; in xhci_free_virt_device()
924 xhci->dcbaa->dev_context_ptrs[slot_id] = 0; in xhci_free_virt_device()
933 xhci_ring_free(xhci, dev->eps[i].ring); in xhci_free_virt_device()
935 xhci_free_stream_info(xhci, in xhci_free_virt_device()
943 xhci_warn(xhci, "Slot %u endpoint %u " in xhci_free_virt_device()
948 xhci_free_tt_info(xhci, dev, slot_id); in xhci_free_virt_device()
950 xhci_update_tt_active_eps(xhci, dev, old_active_eps); in xhci_free_virt_device()
954 xhci_ring_free(xhci, dev->ring_cache[i]); in xhci_free_virt_device()
959 xhci_free_container_ctx(xhci, dev->in_ctx); in xhci_free_virt_device()
961 xhci_free_container_ctx(xhci, dev->out_ctx); in xhci_free_virt_device()
963 kfree(xhci->devs[slot_id]); in xhci_free_virt_device()
964 xhci->devs[slot_id] = NULL; in xhci_free_virt_device()
967 int xhci_alloc_virt_device(struct xhci_hcd *xhci, int slot_id, in xhci_alloc_virt_device() argument
974 if (slot_id == 0 || xhci->devs[slot_id]) { in xhci_alloc_virt_device()
975 xhci_warn(xhci, "Bad Slot ID %d\n", slot_id); in xhci_alloc_virt_device()
979 xhci->devs[slot_id] = kzalloc(sizeof(*xhci->devs[slot_id]), flags); in xhci_alloc_virt_device()
980 if (!xhci->devs[slot_id]) in xhci_alloc_virt_device()
982 dev = xhci->devs[slot_id]; in xhci_alloc_virt_device()
985 dev->out_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_DEVICE, flags); in xhci_alloc_virt_device()
989 xhci_dbg(xhci, "Slot %d output ctx = 0x%llx (dma)\n", slot_id, in xhci_alloc_virt_device()
993 dev->in_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_INPUT, flags); in xhci_alloc_virt_device()
997 xhci_dbg(xhci, "Slot %d input ctx = 0x%llx (dma)\n", slot_id, in xhci_alloc_virt_device()
1002 xhci_init_endpoint_timer(xhci, &dev->eps[i]); in xhci_alloc_virt_device()
1008 dev->eps[0].ring = xhci_ring_alloc(xhci, 2, 1, TYPE_CTRL, flags); in xhci_alloc_virt_device()
1024 xhci->dcbaa->dev_context_ptrs[slot_id] = cpu_to_le64(dev->out_ctx->dma); in xhci_alloc_virt_device()
1025 xhci_dbg(xhci, "Set slot id %d dcbaa entry %p to 0x%llx\n", in xhci_alloc_virt_device()
1027 &xhci->dcbaa->dev_context_ptrs[slot_id], in xhci_alloc_virt_device()
1028 le64_to_cpu(xhci->dcbaa->dev_context_ptrs[slot_id])); in xhci_alloc_virt_device()
1032 xhci_free_virt_device(xhci, slot_id); in xhci_alloc_virt_device()
1036 void xhci_copy_ep0_dequeue_into_input_ctx(struct xhci_hcd *xhci, in xhci_copy_ep0_dequeue_into_input_ctx() argument
1043 virt_dev = xhci->devs[udev->slot_id]; in xhci_copy_ep0_dequeue_into_input_ctx()
1044 ep0_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, 0); in xhci_copy_ep0_dequeue_into_input_ctx()
1069 static u32 xhci_find_real_port_number(struct xhci_hcd *xhci, in xhci_find_real_port_number() argument
1076 hcd = xhci->shared_hcd; in xhci_find_real_port_number()
1078 hcd = xhci->main_hcd; in xhci_find_real_port_number()
1088 int xhci_setup_addressable_virt_dev(struct xhci_hcd *xhci, struct usb_device *udev) in xhci_setup_addressable_virt_dev() argument
1097 dev = xhci->devs[udev->slot_id]; in xhci_setup_addressable_virt_dev()
1100 xhci_warn(xhci, "Slot ID %d is not assigned to this device\n", in xhci_setup_addressable_virt_dev()
1104 ep0_ctx = xhci_get_ep_ctx(xhci, dev->in_ctx, 0); in xhci_setup_addressable_virt_dev()
1105 slot_ctx = xhci_get_slot_ctx(xhci, dev->in_ctx); in xhci_setup_addressable_virt_dev()
1128 xhci_dbg(xhci, "FIXME xHCI doesn't support wireless speeds\n"); in xhci_setup_addressable_virt_dev()
1136 port_num = xhci_find_real_port_number(xhci, udev); in xhci_setup_addressable_virt_dev()
1146 xhci_dbg(xhci, "Set root hub portnum to %d\n", port_num); in xhci_setup_addressable_virt_dev()
1147 xhci_dbg(xhci, "Set fake root hub portnum to %d\n", dev->fake_port); in xhci_setup_addressable_virt_dev()
1156 dev->bw_table = &xhci->rh_bw[port_num - 1].bw_table; in xhci_setup_addressable_virt_dev()
1161 rh_bw = &xhci->rh_bw[port_num - 1]; in xhci_setup_addressable_virt_dev()
1176 xhci_warn(xhci, "WARN: Didn't find a matching TT\n"); in xhci_setup_addressable_virt_dev()
1186 xhci_dbg(xhci, "udev->tt = %p\n", udev->tt); in xhci_setup_addressable_virt_dev()
1187 xhci_dbg(xhci, "udev->ttport = 0x%x\n", udev->ttport); in xhci_setup_addressable_virt_dev()
1399 int xhci_endpoint_init(struct xhci_hcd *xhci, in xhci_endpoint_init() argument
1415 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, ep_index); in xhci_endpoint_init()
1425 xhci_ring_alloc(xhci, 2, 1, type, mem_flags); in xhci_endpoint_init()
1434 xhci_reinit_cached_ring(xhci, virt_dev->eps[ep_index].new_ring, in xhci_endpoint_init()
1504 if (usb_endpoint_xfer_control(&ep->desc) && xhci->hci_version >= 0x100) in xhci_endpoint_init()
1514 void xhci_endpoint_zero(struct xhci_hcd *xhci, in xhci_endpoint_zero() argument
1522 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, ep_index); in xhci_endpoint_zero()
1543 void xhci_update_bw_info(struct xhci_hcd *xhci, in xhci_update_bw_info() argument
1568 ep_ctx = xhci_get_ep_ctx(xhci, in_ctx, i); in xhci_update_bw_info()
1601 void xhci_endpoint_copy(struct xhci_hcd *xhci, in xhci_endpoint_copy() argument
1609 out_ep_ctx = xhci_get_ep_ctx(xhci, out_ctx, ep_index); in xhci_endpoint_copy()
1610 in_ep_ctx = xhci_get_ep_ctx(xhci, in_ctx, ep_index); in xhci_endpoint_copy()
1623 void xhci_slot_copy(struct xhci_hcd *xhci, in xhci_slot_copy() argument
1630 in_slot_ctx = xhci_get_slot_ctx(xhci, in_ctx); in xhci_slot_copy()
1631 out_slot_ctx = xhci_get_slot_ctx(xhci, out_ctx); in xhci_slot_copy()
1640 static int scratchpad_alloc(struct xhci_hcd *xhci, gfp_t flags) in scratchpad_alloc() argument
1643 struct device *dev = xhci_to_hcd(xhci)->self.controller; in scratchpad_alloc()
1644 int num_sp = HCS_MAX_SCRATCHPAD(xhci->hcs_params2); in scratchpad_alloc()
1646 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in scratchpad_alloc()
1652 xhci->scratchpad = kzalloc(sizeof(*xhci->scratchpad), flags); in scratchpad_alloc()
1653 if (!xhci->scratchpad) in scratchpad_alloc()
1656 xhci->scratchpad->sp_array = dma_alloc_coherent(dev, in scratchpad_alloc()
1658 &xhci->scratchpad->sp_dma, flags); in scratchpad_alloc()
1659 if (!xhci->scratchpad->sp_array) in scratchpad_alloc()
1662 xhci->scratchpad->sp_buffers = kzalloc(sizeof(void *) * num_sp, flags); in scratchpad_alloc()
1663 if (!xhci->scratchpad->sp_buffers) in scratchpad_alloc()
1666 xhci->scratchpad->sp_dma_buffers = in scratchpad_alloc()
1669 if (!xhci->scratchpad->sp_dma_buffers) in scratchpad_alloc()
1672 xhci->dcbaa->dev_context_ptrs[0] = cpu_to_le64(xhci->scratchpad->sp_dma); in scratchpad_alloc()
1675 void *buf = dma_alloc_coherent(dev, xhci->page_size, &dma, in scratchpad_alloc()
1680 xhci->scratchpad->sp_array[i] = dma; in scratchpad_alloc()
1681 xhci->scratchpad->sp_buffers[i] = buf; in scratchpad_alloc()
1682 xhci->scratchpad->sp_dma_buffers[i] = dma; in scratchpad_alloc()
1689 dma_free_coherent(dev, xhci->page_size, in scratchpad_alloc()
1690 xhci->scratchpad->sp_buffers[i], in scratchpad_alloc()
1691 xhci->scratchpad->sp_dma_buffers[i]); in scratchpad_alloc()
1693 kfree(xhci->scratchpad->sp_dma_buffers); in scratchpad_alloc()
1696 kfree(xhci->scratchpad->sp_buffers); in scratchpad_alloc()
1700 xhci->scratchpad->sp_array, in scratchpad_alloc()
1701 xhci->scratchpad->sp_dma); in scratchpad_alloc()
1704 kfree(xhci->scratchpad); in scratchpad_alloc()
1705 xhci->scratchpad = NULL; in scratchpad_alloc()
1711 static void scratchpad_free(struct xhci_hcd *xhci) in scratchpad_free() argument
1715 struct device *dev = xhci_to_hcd(xhci)->self.controller; in scratchpad_free()
1717 if (!xhci->scratchpad) in scratchpad_free()
1720 num_sp = HCS_MAX_SCRATCHPAD(xhci->hcs_params2); in scratchpad_free()
1723 dma_free_coherent(dev, xhci->page_size, in scratchpad_free()
1724 xhci->scratchpad->sp_buffers[i], in scratchpad_free()
1725 xhci->scratchpad->sp_dma_buffers[i]); in scratchpad_free()
1727 kfree(xhci->scratchpad->sp_dma_buffers); in scratchpad_free()
1728 kfree(xhci->scratchpad->sp_buffers); in scratchpad_free()
1730 xhci->scratchpad->sp_array, in scratchpad_free()
1731 xhci->scratchpad->sp_dma); in scratchpad_free()
1732 kfree(xhci->scratchpad); in scratchpad_free()
1733 xhci->scratchpad = NULL; in scratchpad_free()
1736 struct xhci_command *xhci_alloc_command(struct xhci_hcd *xhci, in xhci_alloc_command() argument
1748 xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_INPUT, in xhci_alloc_command()
1760 xhci_free_container_ctx(xhci, command->in_ctx); in xhci_alloc_command()
1780 void xhci_free_command(struct xhci_hcd *xhci, in xhci_free_command() argument
1783 xhci_free_container_ctx(xhci, in xhci_free_command()
1789 void xhci_mem_cleanup(struct xhci_hcd *xhci) in xhci_mem_cleanup() argument
1791 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_mem_cleanup()
1795 del_timer_sync(&xhci->cmd_timer); in xhci_mem_cleanup()
1798 size = sizeof(struct xhci_erst_entry)*(xhci->erst.num_entries); in xhci_mem_cleanup()
1799 if (xhci->erst.entries) in xhci_mem_cleanup()
1801 xhci->erst.entries, xhci->erst.erst_dma_addr); in xhci_mem_cleanup()
1802 xhci->erst.entries = NULL; in xhci_mem_cleanup()
1803 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed ERST"); in xhci_mem_cleanup()
1804 if (xhci->event_ring) in xhci_mem_cleanup()
1805 xhci_ring_free(xhci, xhci->event_ring); in xhci_mem_cleanup()
1806 xhci->event_ring = NULL; in xhci_mem_cleanup()
1807 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed event ring"); in xhci_mem_cleanup()
1809 if (xhci->lpm_command) in xhci_mem_cleanup()
1810 xhci_free_command(xhci, xhci->lpm_command); in xhci_mem_cleanup()
1811 xhci->lpm_command = NULL; in xhci_mem_cleanup()
1812 if (xhci->cmd_ring) in xhci_mem_cleanup()
1813 xhci_ring_free(xhci, xhci->cmd_ring); in xhci_mem_cleanup()
1814 xhci->cmd_ring = NULL; in xhci_mem_cleanup()
1815 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed command ring"); in xhci_mem_cleanup()
1816 xhci_cleanup_command_queue(xhci); in xhci_mem_cleanup()
1818 num_ports = HCS_MAX_PORTS(xhci->hcs_params1); in xhci_mem_cleanup()
1819 for (i = 0; i < num_ports && xhci->rh_bw; i++) { in xhci_mem_cleanup()
1820 struct xhci_interval_bw_table *bwt = &xhci->rh_bw[i].bw_table; in xhci_mem_cleanup()
1829 xhci_free_virt_device(xhci, i); in xhci_mem_cleanup()
1831 dma_pool_destroy(xhci->segment_pool); in xhci_mem_cleanup()
1832 xhci->segment_pool = NULL; in xhci_mem_cleanup()
1833 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed segment pool"); in xhci_mem_cleanup()
1835 dma_pool_destroy(xhci->device_pool); in xhci_mem_cleanup()
1836 xhci->device_pool = NULL; in xhci_mem_cleanup()
1837 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed device context pool"); in xhci_mem_cleanup()
1839 dma_pool_destroy(xhci->small_streams_pool); in xhci_mem_cleanup()
1840 xhci->small_streams_pool = NULL; in xhci_mem_cleanup()
1841 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_cleanup()
1844 dma_pool_destroy(xhci->medium_streams_pool); in xhci_mem_cleanup()
1845 xhci->medium_streams_pool = NULL; in xhci_mem_cleanup()
1846 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_cleanup()
1849 if (xhci->dcbaa) in xhci_mem_cleanup()
1850 dma_free_coherent(dev, sizeof(*xhci->dcbaa), in xhci_mem_cleanup()
1851 xhci->dcbaa, xhci->dcbaa->dma); in xhci_mem_cleanup()
1852 xhci->dcbaa = NULL; in xhci_mem_cleanup()
1854 scratchpad_free(xhci); in xhci_mem_cleanup()
1856 if (!xhci->rh_bw) in xhci_mem_cleanup()
1861 list_for_each_entry_safe(tt, n, &xhci->rh_bw[i].tts, tt_list) { in xhci_mem_cleanup()
1868 xhci->cmd_ring_reserved_trbs = 0; in xhci_mem_cleanup()
1869 xhci->num_usb2_ports = 0; in xhci_mem_cleanup()
1870 xhci->num_usb3_ports = 0; in xhci_mem_cleanup()
1871 xhci->num_active_eps = 0; in xhci_mem_cleanup()
1872 kfree(xhci->usb2_ports); in xhci_mem_cleanup()
1873 kfree(xhci->usb3_ports); in xhci_mem_cleanup()
1874 kfree(xhci->port_array); in xhci_mem_cleanup()
1875 kfree(xhci->rh_bw); in xhci_mem_cleanup()
1876 kfree(xhci->ext_caps); in xhci_mem_cleanup()
1878 xhci->usb2_ports = NULL; in xhci_mem_cleanup()
1879 xhci->usb3_ports = NULL; in xhci_mem_cleanup()
1880 xhci->port_array = NULL; in xhci_mem_cleanup()
1881 xhci->rh_bw = NULL; in xhci_mem_cleanup()
1882 xhci->ext_caps = NULL; in xhci_mem_cleanup()
1884 xhci->page_size = 0; in xhci_mem_cleanup()
1885 xhci->page_shift = 0; in xhci_mem_cleanup()
1886 xhci->bus_state[0].bus_suspended = 0; in xhci_mem_cleanup()
1887 xhci->bus_state[1].bus_suspended = 0; in xhci_mem_cleanup()
1890 static int xhci_test_trb_in_td(struct xhci_hcd *xhci, in xhci_test_trb_in_td() argument
1905 seg = trb_in_td(xhci, input_seg, start_trb, end_trb, input_dma, false); in xhci_test_trb_in_td()
1907 xhci_warn(xhci, "WARN: %s TRB math test %d failed!\n", in xhci_test_trb_in_td()
1909 xhci_warn(xhci, "Tested TRB math w/ seg %p and " in xhci_test_trb_in_td()
1913 xhci_warn(xhci, "starting TRB %p (0x%llx DMA), " in xhci_test_trb_in_td()
1917 xhci_warn(xhci, "Expected seg %p, got seg %p\n", in xhci_test_trb_in_td()
1919 trb_in_td(xhci, input_seg, start_trb, end_trb, input_dma, in xhci_test_trb_in_td()
1927 static int xhci_check_trb_in_td_math(struct xhci_hcd *xhci) in xhci_check_trb_in_td_math() argument
1936 { xhci->event_ring->first_seg->dma - 16, NULL }, in xhci_check_trb_in_td_math()
1938 { xhci->event_ring->first_seg->dma - 1, NULL }, in xhci_check_trb_in_td_math()
1940 { xhci->event_ring->first_seg->dma, xhci->event_ring->first_seg }, in xhci_check_trb_in_td_math()
1942 { xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 1)*16, in xhci_check_trb_in_td_math()
1943 xhci->event_ring->first_seg }, in xhci_check_trb_in_td_math()
1945 { xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 1)*16 + 1, NULL }, in xhci_check_trb_in_td_math()
1947 { xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT)*16, NULL }, in xhci_check_trb_in_td_math()
1959 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1960 .start_trb = xhci->event_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
1961 .end_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
1962 .input_dma = xhci->cmd_ring->first_seg->dma, in xhci_check_trb_in_td_math()
1966 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1967 .start_trb = xhci->event_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
1968 .end_trb = &xhci->cmd_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
1969 .input_dma = xhci->cmd_ring->first_seg->dma, in xhci_check_trb_in_td_math()
1973 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1974 .start_trb = xhci->cmd_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
1975 .end_trb = &xhci->cmd_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
1976 .input_dma = xhci->cmd_ring->first_seg->dma, in xhci_check_trb_in_td_math()
1980 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1981 .start_trb = &xhci->event_ring->first_seg->trbs[0], in xhci_check_trb_in_td_math()
1982 .end_trb = &xhci->event_ring->first_seg->trbs[3], in xhci_check_trb_in_td_math()
1983 .input_dma = xhci->event_ring->first_seg->dma + 4*16, in xhci_check_trb_in_td_math()
1987 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1988 .start_trb = &xhci->event_ring->first_seg->trbs[3], in xhci_check_trb_in_td_math()
1989 .end_trb = &xhci->event_ring->first_seg->trbs[6], in xhci_check_trb_in_td_math()
1990 .input_dma = xhci->event_ring->first_seg->dma + 2*16, in xhci_check_trb_in_td_math()
1994 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
1995 .start_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 3], in xhci_check_trb_in_td_math()
1996 .end_trb = &xhci->event_ring->first_seg->trbs[1], in xhci_check_trb_in_td_math()
1997 .input_dma = xhci->event_ring->first_seg->dma + 2*16, in xhci_check_trb_in_td_math()
2001 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
2002 .start_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 3], in xhci_check_trb_in_td_math()
2003 .end_trb = &xhci->event_ring->first_seg->trbs[1], in xhci_check_trb_in_td_math()
2004 .input_dma = xhci->event_ring->first_seg->dma + (TRBS_PER_SEGMENT - 4)*16, in xhci_check_trb_in_td_math()
2008 { .input_seg = xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
2009 .start_trb = &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 3], in xhci_check_trb_in_td_math()
2010 .end_trb = &xhci->event_ring->first_seg->trbs[1], in xhci_check_trb_in_td_math()
2011 .input_dma = xhci->cmd_ring->first_seg->dma + 2*16, in xhci_check_trb_in_td_math()
2021 ret = xhci_test_trb_in_td(xhci, in xhci_check_trb_in_td_math()
2022 xhci->event_ring->first_seg, in xhci_check_trb_in_td_math()
2023 xhci->event_ring->first_seg->trbs, in xhci_check_trb_in_td_math()
2024 &xhci->event_ring->first_seg->trbs[TRBS_PER_SEGMENT - 1], in xhci_check_trb_in_td_math()
2034 ret = xhci_test_trb_in_td(xhci, in xhci_check_trb_in_td_math()
2044 xhci_dbg(xhci, "TRB math tests passed.\n"); in xhci_check_trb_in_td_math()
2048 static void xhci_set_hc_event_deq(struct xhci_hcd *xhci) in xhci_set_hc_event_deq() argument
2053 deq = xhci_trb_virt_to_dma(xhci->event_ring->deq_seg, in xhci_set_hc_event_deq()
2054 xhci->event_ring->dequeue); in xhci_set_hc_event_deq()
2056 xhci_warn(xhci, "WARN something wrong with SW event ring " in xhci_set_hc_event_deq()
2059 temp = xhci_read_64(xhci, &xhci->ir_set->erst_dequeue); in xhci_set_hc_event_deq()
2065 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_set_hc_event_deq()
2068 xhci_write_64(xhci, ((u64) deq & (u64) ~ERST_PTR_MASK) | temp, in xhci_set_hc_event_deq()
2069 &xhci->ir_set->erst_dequeue); in xhci_set_hc_event_deq()
2072 static void xhci_add_in_port(struct xhci_hcd *xhci, unsigned int num_ports, in xhci_add_in_port() argument
2082 rhub = &xhci->usb3_rhub; in xhci_add_in_port()
2084 rhub = &xhci->usb2_rhub; in xhci_add_in_port()
2086 xhci_warn(xhci, "Ignoring unknown port speed, " in xhci_add_in_port()
2099 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2126 xhci_dbg(xhci, "PSIV:%d PSIE:%d PLT:%d PFD:%d LP:%d PSIM:%d\n", in xhci_add_in_port()
2136 if (major_revision < 0x03 && xhci->num_ext_caps < max_caps) in xhci_add_in_port()
2137 xhci->ext_caps[xhci->num_ext_caps++] = temp; in xhci_add_in_port()
2140 if ((xhci->hci_version == 0x96) && (major_revision != 0x03) && in xhci_add_in_port()
2142 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2144 xhci->sw_lpm_support = 1; in xhci_add_in_port()
2147 if ((xhci->hci_version >= 0x100) && (major_revision != 0x03)) { in xhci_add_in_port()
2148 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2150 xhci->sw_lpm_support = 1; in xhci_add_in_port()
2152 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2154 xhci->hw_lpm_support = 1; in xhci_add_in_port()
2161 if (xhci->port_array[i] != 0) { in xhci_add_in_port()
2162 xhci_warn(xhci, "Duplicate port entry, Ext Cap %p," in xhci_add_in_port()
2164 xhci_warn(xhci, "Port was marked as USB %u, " in xhci_add_in_port()
2166 xhci->port_array[i], major_revision); in xhci_add_in_port()
2170 if (xhci->port_array[i] != major_revision && in xhci_add_in_port()
2171 xhci->port_array[i] != DUPLICATE_ENTRY) { in xhci_add_in_port()
2172 if (xhci->port_array[i] == 0x03) in xhci_add_in_port()
2173 xhci->num_usb3_ports--; in xhci_add_in_port()
2175 xhci->num_usb2_ports--; in xhci_add_in_port()
2176 xhci->port_array[i] = DUPLICATE_ENTRY; in xhci_add_in_port()
2181 xhci->port_array[i] = major_revision; in xhci_add_in_port()
2183 xhci->num_usb3_ports++; in xhci_add_in_port()
2185 xhci->num_usb2_ports++; in xhci_add_in_port()
2197 static int xhci_setup_port_arrays(struct xhci_hcd *xhci, gfp_t flags) in xhci_setup_port_arrays() argument
2205 addr = &xhci->cap_regs->hcc_params; in xhci_setup_port_arrays()
2208 xhci_err(xhci, "No Extended Capability registers, " in xhci_setup_port_arrays()
2213 num_ports = HCS_MAX_PORTS(xhci->hcs_params1); in xhci_setup_port_arrays()
2214 xhci->port_array = kzalloc(sizeof(*xhci->port_array)*num_ports, flags); in xhci_setup_port_arrays()
2215 if (!xhci->port_array) in xhci_setup_port_arrays()
2218 xhci->rh_bw = kzalloc(sizeof(*xhci->rh_bw)*num_ports, flags); in xhci_setup_port_arrays()
2219 if (!xhci->rh_bw) in xhci_setup_port_arrays()
2224 INIT_LIST_HEAD(&xhci->rh_bw[i].tts); in xhci_setup_port_arrays()
2225 bw_table = &xhci->rh_bw[i].bw_table; in xhci_setup_port_arrays()
2235 addr = &xhci->cap_regs->hc_capbase + offset; in xhci_setup_port_arrays()
2250 xhci->ext_caps = kzalloc(sizeof(*xhci->ext_caps) * cap_count, flags); in xhci_setup_port_arrays()
2251 if (!xhci->ext_caps) in xhci_setup_port_arrays()
2259 xhci_add_in_port(xhci, num_ports, addr, in xhci_setup_port_arrays()
2263 if (!offset || (xhci->num_usb2_ports + xhci->num_usb3_ports) in xhci_setup_port_arrays()
2273 if (xhci->num_usb2_ports == 0 && xhci->num_usb3_ports == 0) { in xhci_setup_port_arrays()
2274 xhci_warn(xhci, "No ports on the roothubs?\n"); in xhci_setup_port_arrays()
2277 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2279 xhci->num_usb2_ports, xhci->num_usb3_ports); in xhci_setup_port_arrays()
2284 if (xhci->num_usb3_ports > 15) { in xhci_setup_port_arrays()
2285 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2287 xhci->num_usb3_ports = 15; in xhci_setup_port_arrays()
2289 if (xhci->num_usb2_ports > USB_MAXCHILDREN) { in xhci_setup_port_arrays()
2290 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2293 xhci->num_usb2_ports = USB_MAXCHILDREN; in xhci_setup_port_arrays()
2300 if (xhci->num_usb2_ports) { in xhci_setup_port_arrays()
2301 xhci->usb2_ports = kmalloc(sizeof(*xhci->usb2_ports)* in xhci_setup_port_arrays()
2302 xhci->num_usb2_ports, flags); in xhci_setup_port_arrays()
2303 if (!xhci->usb2_ports) in xhci_setup_port_arrays()
2308 if (xhci->port_array[i] == 0x03 || in xhci_setup_port_arrays()
2309 xhci->port_array[i] == 0 || in xhci_setup_port_arrays()
2310 xhci->port_array[i] == DUPLICATE_ENTRY) in xhci_setup_port_arrays()
2313 xhci->usb2_ports[port_index] = in xhci_setup_port_arrays()
2314 &xhci->op_regs->port_status_base + in xhci_setup_port_arrays()
2316 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2319 xhci->usb2_ports[port_index]); in xhci_setup_port_arrays()
2321 if (port_index == xhci->num_usb2_ports) in xhci_setup_port_arrays()
2325 if (xhci->num_usb3_ports) { in xhci_setup_port_arrays()
2326 xhci->usb3_ports = kmalloc(sizeof(*xhci->usb3_ports)* in xhci_setup_port_arrays()
2327 xhci->num_usb3_ports, flags); in xhci_setup_port_arrays()
2328 if (!xhci->usb3_ports) in xhci_setup_port_arrays()
2333 if (xhci->port_array[i] == 0x03) { in xhci_setup_port_arrays()
2334 xhci->usb3_ports[port_index] = in xhci_setup_port_arrays()
2335 &xhci->op_regs->port_status_base + in xhci_setup_port_arrays()
2337 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2340 xhci->usb3_ports[port_index]); in xhci_setup_port_arrays()
2342 if (port_index == xhci->num_usb3_ports) in xhci_setup_port_arrays()
2349 int xhci_mem_init(struct xhci_hcd *xhci, gfp_t flags) in xhci_mem_init() argument
2352 struct device *dev = xhci_to_hcd(xhci)->self.controller; in xhci_mem_init()
2359 INIT_LIST_HEAD(&xhci->cmd_list); in xhci_mem_init()
2362 setup_timer(&xhci->cmd_timer, xhci_handle_command_timeout, in xhci_mem_init()
2363 (unsigned long)xhci); in xhci_mem_init()
2365 page_size = readl(&xhci->op_regs->page_size); in xhci_mem_init()
2366 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2374 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2377 xhci_warn(xhci, "WARN: no supported page size\n"); in xhci_mem_init()
2379 xhci->page_shift = 12; in xhci_mem_init()
2380 xhci->page_size = 1 << xhci->page_shift; in xhci_mem_init()
2381 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2382 "HCD page size set to %iK", xhci->page_size / 1024); in xhci_mem_init()
2388 val = HCS_MAX_SLOTS(readl(&xhci->cap_regs->hcs_params1)); in xhci_mem_init()
2389 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2391 val2 = readl(&xhci->op_regs->config_reg); in xhci_mem_init()
2393 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2395 writel(val, &xhci->op_regs->config_reg); in xhci_mem_init()
2401 xhci->dcbaa = dma_alloc_coherent(dev, sizeof(*xhci->dcbaa), &dma, in xhci_mem_init()
2403 if (!xhci->dcbaa) in xhci_mem_init()
2405 memset(xhci->dcbaa, 0, sizeof *(xhci->dcbaa)); in xhci_mem_init()
2406 xhci->dcbaa->dma = dma; in xhci_mem_init()
2407 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2409 (unsigned long long)xhci->dcbaa->dma, xhci->dcbaa); in xhci_mem_init()
2410 xhci_write_64(xhci, dma, &xhci->op_regs->dcbaa_ptr); in xhci_mem_init()
2419 xhci->segment_pool = dma_pool_create("xHCI ring segments", dev, in xhci_mem_init()
2420 TRB_SEGMENT_SIZE, TRB_SEGMENT_SIZE, xhci->page_size); in xhci_mem_init()
2423 xhci->device_pool = dma_pool_create("xHCI input/output contexts", dev, in xhci_mem_init()
2424 2112, 64, xhci->page_size); in xhci_mem_init()
2425 if (!xhci->segment_pool || !xhci->device_pool) in xhci_mem_init()
2431 xhci->small_streams_pool = in xhci_mem_init()
2434 xhci->medium_streams_pool = in xhci_mem_init()
2441 if (!xhci->small_streams_pool || !xhci->medium_streams_pool) in xhci_mem_init()
2445 xhci->cmd_ring = xhci_ring_alloc(xhci, 1, 1, TYPE_COMMAND, flags); in xhci_mem_init()
2446 if (!xhci->cmd_ring) in xhci_mem_init()
2448 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2449 "Allocated command ring at %p", xhci->cmd_ring); in xhci_mem_init()
2450 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "First segment DMA is 0x%llx", in xhci_mem_init()
2451 (unsigned long long)xhci->cmd_ring->first_seg->dma); in xhci_mem_init()
2454 val_64 = xhci_read_64(xhci, &xhci->op_regs->cmd_ring); in xhci_mem_init()
2456 (xhci->cmd_ring->first_seg->dma & (u64) ~CMD_RING_RSVD_BITS) | in xhci_mem_init()
2457 xhci->cmd_ring->cycle_state; in xhci_mem_init()
2458 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2460 xhci_write_64(xhci, val_64, &xhci->op_regs->cmd_ring); in xhci_mem_init()
2461 xhci_dbg_cmd_ptrs(xhci); in xhci_mem_init()
2463 xhci->lpm_command = xhci_alloc_command(xhci, true, true, flags); in xhci_mem_init()
2464 if (!xhci->lpm_command) in xhci_mem_init()
2471 xhci->cmd_ring_reserved_trbs++; in xhci_mem_init()
2473 val = readl(&xhci->cap_regs->db_off); in xhci_mem_init()
2475 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2478 xhci->dba = (void __iomem *) xhci->cap_regs + val; in xhci_mem_init()
2479 xhci_dbg_regs(xhci); in xhci_mem_init()
2480 xhci_print_run_regs(xhci); in xhci_mem_init()
2482 xhci->ir_set = &xhci->run_regs->ir_set[0]; in xhci_mem_init()
2488 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "// Allocating event ring"); in xhci_mem_init()
2489 xhci->event_ring = xhci_ring_alloc(xhci, ERST_NUM_SEGS, 1, TYPE_EVENT, in xhci_mem_init()
2491 if (!xhci->event_ring) in xhci_mem_init()
2493 if (xhci_check_trb_in_td_math(xhci) < 0) in xhci_mem_init()
2496 xhci->erst.entries = dma_alloc_coherent(dev, in xhci_mem_init()
2499 if (!xhci->erst.entries) in xhci_mem_init()
2501 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2505 memset(xhci->erst.entries, 0, sizeof(struct xhci_erst_entry)*ERST_NUM_SEGS); in xhci_mem_init()
2506 xhci->erst.num_entries = ERST_NUM_SEGS; in xhci_mem_init()
2507 xhci->erst.erst_dma_addr = dma; in xhci_mem_init()
2508 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2510 xhci->erst.num_entries, in xhci_mem_init()
2511 xhci->erst.entries, in xhci_mem_init()
2512 (unsigned long long)xhci->erst.erst_dma_addr); in xhci_mem_init()
2515 for (val = 0, seg = xhci->event_ring->first_seg; val < ERST_NUM_SEGS; val++) { in xhci_mem_init()
2516 struct xhci_erst_entry *entry = &xhci->erst.entries[val]; in xhci_mem_init()
2524 val = readl(&xhci->ir_set->erst_size); in xhci_mem_init()
2527 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2530 writel(val, &xhci->ir_set->erst_size); in xhci_mem_init()
2532 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2535 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2537 (unsigned long long)xhci->erst.erst_dma_addr); in xhci_mem_init()
2538 val_64 = xhci_read_64(xhci, &xhci->ir_set->erst_base); in xhci_mem_init()
2540 val_64 |= (xhci->erst.erst_dma_addr & (u64) ~ERST_PTR_MASK); in xhci_mem_init()
2541 xhci_write_64(xhci, val_64, &xhci->ir_set->erst_base); in xhci_mem_init()
2544 xhci_set_hc_event_deq(xhci); in xhci_mem_init()
2545 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2547 xhci_print_ir_set(xhci, 0); in xhci_mem_init()
2554 init_completion(&xhci->addr_dev); in xhci_mem_init()
2556 xhci->devs[i] = NULL; in xhci_mem_init()
2558 xhci->bus_state[0].resume_done[i] = 0; in xhci_mem_init()
2559 xhci->bus_state[1].resume_done[i] = 0; in xhci_mem_init()
2561 init_completion(&xhci->bus_state[1].rexit_done[i]); in xhci_mem_init()
2564 if (scratchpad_alloc(xhci, flags)) in xhci_mem_init()
2566 if (xhci_setup_port_arrays(xhci, flags)) in xhci_mem_init()
2573 temp = readl(&xhci->op_regs->dev_notification); in xhci_mem_init()
2576 writel(temp, &xhci->op_regs->dev_notification); in xhci_mem_init()
2581 xhci_warn(xhci, "Couldn't initialize memory\n"); in xhci_mem_init()
2582 xhci_halt(xhci); in xhci_mem_init()
2583 xhci_reset(xhci); in xhci_mem_init()
2584 xhci_mem_cleanup(xhci); in xhci_mem_init()
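
The references above trace the xHCI memory helpers from allocation (xhci_segment_alloc, xhci_ring_alloc, xhci_alloc_container_ctx, xhci_mem_init) through teardown (xhci_segment_free, xhci_ring_free, xhci_mem_cleanup). As a rough, self-contained sketch of the dma_pool lifecycle those helpers lean on (dma_pool_create in xhci_mem_init, dma_pool_alloc in xhci_segment_alloc, dma_pool_free and dma_pool_destroy on the free path), the fragment below shows the same create/alloc/free/destroy ordering. The demo_* names and DEMO_SEG_SIZE are illustrative stand-ins, not part of the driver.

/*
 * Minimal sketch (not the xHCI driver itself) of the DMA-pool pattern
 * used for ring segments: create a pool of fixed-size, aligned buffers,
 * carve a segment out of it, and tear everything down in reverse order.
 */
#include <linux/device.h>
#include <linux/dmapool.h>
#include <linux/errno.h>

#define DEMO_SEG_SIZE	4096	/* illustrative stand-in for TRB_SEGMENT_SIZE */

struct demo_ctx {
	struct dma_pool *segment_pool;
	void *seg_cpu;		/* CPU address of one segment */
	dma_addr_t seg_dma;	/* bus address handed to the controller */
};

/* Mirrors the xhci_mem_init() step: dma_pool_create(name, dev, size, align, boundary). */
static int demo_mem_init(struct demo_ctx *ctx, struct device *dev, gfp_t flags)
{
	ctx->segment_pool = dma_pool_create("demo ring segments", dev,
					    DEMO_SEG_SIZE, DEMO_SEG_SIZE, 0);
	if (!ctx->segment_pool)
		return -ENOMEM;

	/* Mirrors xhci_segment_alloc(): one segment plus its DMA handle. */
	ctx->seg_cpu = dma_pool_alloc(ctx->segment_pool, flags, &ctx->seg_dma);
	if (!ctx->seg_cpu) {
		dma_pool_destroy(ctx->segment_pool);
		ctx->segment_pool = NULL;
		return -ENOMEM;
	}
	return 0;
}

/* Mirrors xhci_segment_free() + xhci_mem_cleanup(): free buffers before destroying the pool. */
static void demo_mem_cleanup(struct demo_ctx *ctx)
{
	if (ctx->seg_cpu)
		dma_pool_free(ctx->segment_pool, ctx->seg_cpu, ctx->seg_dma);
	dma_pool_destroy(ctx->segment_pool);
	ctx->segment_pool = NULL;
}

The ordering matters in the same way it does in the listing: every dma_pool_free() must happen before the corresponding dma_pool_destroy(), which is why xhci_mem_cleanup() frees the rings and device contexts before destroying segment_pool and device_pool.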