Lines Matching refs:ioat

The hits below come from the ioatdma v2 channel code (drivers/dma/ioat/dma_v2.c in pre-4.x kernels). Each entry gives the source line number, the matching line, and its enclosing function; a trailing "argument" or "local" marks how the identifier is used on that line.

52 void __ioat2_issue_pending(struct ioat2_dma_chan *ioat)  in __ioat2_issue_pending()  argument
54 struct ioat_chan_common *chan = &ioat->base; in __ioat2_issue_pending()
56 ioat->dmacount += ioat2_ring_pending(ioat); in __ioat2_issue_pending()
57 ioat->issued = ioat->head; in __ioat2_issue_pending()
58 writew(ioat->dmacount, chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET); in __ioat2_issue_pending()
61 __func__, ioat->head, ioat->tail, ioat->issued, ioat->dmacount); in __ioat2_issue_pending()
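
The v2 ring is tracked with three free-running u16 cursors: head (next slot software will fill), issued (one past the last slot handed to hardware), and tail (next slot awaiting completion cleanup). ioat2_ring_pending() is then just head - issued, and __ioat2_issue_pending() folds that delta into the running dmacount before writing it to the DMACOUNT doorbell register. A minimal userspace sketch of that bookkeeping; the struct and helpers are illustrative stand-ins, not the driver's types:

#include <stdio.h>
#include <stdint.h>

struct ring_state {
	uint16_t head;     /* next slot software will fill */
	uint16_t issued;   /* one past the last slot handed to hardware */
	uint16_t tail;     /* next slot awaiting completion cleanup */
	uint16_t dmacount; /* running total the driver writes to DMACOUNT */
};

static uint16_t ring_pending(const struct ring_state *r)
{
	/* free-running u16 cursors: the subtraction stays correct across wrap */
	return r->head - r->issued;
}

static void issue_pending(struct ring_state *r)
{
	r->dmacount += ring_pending(r);
	r->issued = r->head;
	/* driver: writew(ioat->dmacount, chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET) */
	printf("dmacount now %#x\n", r->dmacount);
}

int main(void)
{
	struct ring_state r = { .head = 5, .issued = 2 };

	issue_pending(&r); /* flushes the 3 prepped-but-unissued descriptors */
	return 0;
}
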
66 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ioat2_issue_pending() local
68 if (ioat2_ring_pending(ioat)) { in ioat2_issue_pending()
69 spin_lock_bh(&ioat->prep_lock); in ioat2_issue_pending()
70 __ioat2_issue_pending(ioat); in ioat2_issue_pending()
71 spin_unlock_bh(&ioat->prep_lock); in ioat2_issue_pending()
82 static void ioat2_update_pending(struct ioat2_dma_chan *ioat) in ioat2_update_pending() argument
84 if (ioat2_ring_pending(ioat) > ioat_pending_level) in ioat2_update_pending()
85 __ioat2_issue_pending(ioat); in ioat2_update_pending()
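
ioat2_update_pending() is the lazy variant: it only rings the doorbell once the backlog exceeds ioat_pending_level (a module parameter), amortizing the MMIO write over several descriptors, while ioat2_issue_pending() above flushes unconditionally for callers that cannot wait. A small sketch of that policy; the threshold value of 4 is used here purely as an example:

#include <stdio.h>
#include <stdint.h>

static const uint16_t pending_level = 4; /* stand-in for the ioat_pending_level module param */

static void kick(void) { puts("doorbell: DMACOUNT written"); }

static void update_pending(uint16_t head, uint16_t issued)
{
	/* defer the MMIO write until enough work has queued up */
	if ((uint16_t)(head - issued) > pending_level)
		kick();
}

int main(void)
{
	update_pending(3, 0); /* 3 pending: below threshold, no MMIO */
	update_pending(8, 0); /* 8 pending: flushed to hardware */
	return 0;
}
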
88 static void __ioat2_start_null_desc(struct ioat2_dma_chan *ioat) in __ioat2_start_null_desc() argument
93 if (ioat2_ring_space(ioat) < 1) { in __ioat2_start_null_desc()
94 dev_err(to_dev(&ioat->base), in __ioat2_start_null_desc()
99 dev_dbg(to_dev(&ioat->base), "%s: head: %#x tail: %#x issued: %#x\n", in __ioat2_start_null_desc()
100 __func__, ioat->head, ioat->tail, ioat->issued); in __ioat2_start_null_desc()
101 desc = ioat2_get_ring_ent(ioat, ioat->head); in __ioat2_start_null_desc()
113 ioat2_set_chainaddr(ioat, desc->txd.phys); in __ioat2_start_null_desc()
114 dump_desc_dbg(ioat, desc); in __ioat2_start_null_desc()
116 ioat->head += 1; in __ioat2_start_null_desc()
117 __ioat2_issue_pending(ioat); in __ioat2_start_null_desc()
120 static void ioat2_start_null_desc(struct ioat2_dma_chan *ioat) in ioat2_start_null_desc() argument
122 spin_lock_bh(&ioat->prep_lock); in ioat2_start_null_desc()
123 __ioat2_start_null_desc(ioat); in ioat2_start_null_desc()
124 spin_unlock_bh(&ioat->prep_lock); in ioat2_start_null_desc()
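
__ioat2_start_null_desc() primes an idle or freshly reset channel: it claims the slot at head, fills it with a no-op ("null") descriptor, points the chain-address register at that descriptor's DMA address, bumps head, and issues, so the engine has something valid to fetch. A conceptual model of that sequence; the descriptor layout here is invented for illustration, only the ordering mirrors the driver:

#include <stdio.h>
#include <stdint.h>

struct fake_hw_desc {
	unsigned ctl_null : 1; /* "null" op: the engine fetches it but copies nothing */
};

struct fake_chan {
	struct fake_hw_desc ring[8];
	uint64_t ring_phys[8]; /* pretend DMA addresses of the ring slots */
	uint16_t head, issued;
	uint64_t chainaddr;    /* models the IOAT chain-address register pair */
};

static void start_null_desc(struct fake_chan *c)
{
	struct fake_hw_desc *d = &c->ring[c->head & 7];

	d->ctl_null = 1;
	c->chainaddr = c->ring_phys[c->head & 7]; /* ioat2_set_chainaddr() */
	c->head += 1;
	c->issued = c->head;                      /* __ioat2_issue_pending() */
	printf("engine started at %#llx\n", (unsigned long long)c->chainaddr);
}

int main(void)
{
	struct fake_chan c = { .ring_phys = { 0x1000, 0x1040 } };

	start_null_desc(&c);
	return 0;
}
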
127 static void __cleanup(struct ioat2_dma_chan *ioat, dma_addr_t phys_complete) in __cleanup() argument
129 struct ioat_chan_common *chan = &ioat->base; in __cleanup()
134 int idx = ioat->tail, i; in __cleanup()
137 __func__, ioat->head, ioat->tail, ioat->issued); in __cleanup()
139 active = ioat2_ring_active(ioat); in __cleanup()
142 prefetch(ioat2_get_ring_ent(ioat, idx + i + 1)); in __cleanup()
143 desc = ioat2_get_ring_ent(ioat, idx + i); in __cleanup()
145 dump_desc_dbg(ioat, desc); in __cleanup()
159 ioat->tail = idx + i; in __cleanup()
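
__cleanup() walks the active span [tail, tail + active) from the completion side: it prefetches the next ring entry to hide the pointer chase, retires each descriptor (unmap, client callback), and advances tail once at the end rather than per descriptor. The real loop also stops at the descriptor whose address matches the hardware's phys_complete write-back; this userspace model retires the whole span for brevity, with illustrative names:

#include <stdio.h>
#include <stdint.h>

#define RING_ORDER 4
#define RING_SIZE  (1u << RING_ORDER)

struct slot { int cookie; };

static struct slot ring[RING_SIZE];

static void cleanup(uint16_t *tail, uint16_t head)
{
	uint16_t active = (uint16_t)(head - *tail);
	uint16_t idx = *tail;
	int i;

	for (i = 0; i < active; i++) {
		struct slot *s = &ring[(idx + i) & (RING_SIZE - 1)];

		/* driver: prefetch(next entry), unmap, client callback,
		 * and break once phys_complete's descriptor is seen */
		printf("completed cookie %d\n", s->cookie);
	}
	*tail = idx + i; /* retire the span with a single store */
}

int main(void)
{
	uint16_t tail = 2;

	ring[2].cookie = 1; ring[3].cookie = 2; ring[4].cookie = 3;
	cleanup(&tail, 5);
	return 0;
}
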
175 static void ioat2_cleanup(struct ioat2_dma_chan *ioat) in ioat2_cleanup() argument
177 struct ioat_chan_common *chan = &ioat->base; in ioat2_cleanup()
182 __cleanup(ioat, phys_complete); in ioat2_cleanup()
188 struct ioat2_dma_chan *ioat = to_ioat2_chan((void *) data); in ioat2_cleanup_event() local
189 struct ioat_chan_common *chan = &ioat->base; in ioat2_cleanup_event()
191 ioat2_cleanup(ioat); in ioat2_cleanup_event()
194 writew(IOAT_CHANCTRL_RUN, ioat->base.reg_base + IOAT_CHANCTRL_OFFSET); in ioat2_cleanup_event()
197 void __ioat2_restart_chan(struct ioat2_dma_chan *ioat) in __ioat2_restart_chan() argument
199 struct ioat_chan_common *chan = &ioat->base; in __ioat2_restart_chan()
202 ioat->issued = ioat->tail; in __ioat2_restart_chan()
203 ioat->dmacount = 0; in __ioat2_restart_chan()
209 __func__, ioat->head, ioat->tail, ioat->issued, ioat->dmacount); in __ioat2_restart_chan()
211 if (ioat2_ring_pending(ioat)) { in __ioat2_restart_chan()
214 desc = ioat2_get_ring_ent(ioat, ioat->tail); in __ioat2_restart_chan()
215 ioat2_set_chainaddr(ioat, desc->txd.phys); in __ioat2_restart_chan()
216 __ioat2_issue_pending(ioat); in __ioat2_restart_chan()
218 __ioat2_start_null_desc(ioat); in __ioat2_restart_chan()
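
__ioat2_restart_chan() re-arms a stalled or errored channel by rewinding issued to tail and zeroing dmacount, so every descriptor that survived cleanup counts as pending again; the chain-address register is pointed at the oldest such descriptor and the backlog re-issued, or a null descriptor is started if the ring is empty. A sketch of that bookkeeping, with stand-in names:

#include <stdio.h>
#include <stdint.h>

struct chan { uint16_t head, tail, issued, dmacount; };

static void restart_chan(struct chan *c)
{
	c->issued = c->tail; /* everything past tail must be re-issued */
	c->dmacount = 0;     /* the hardware count restarts from zero */

	if ((uint16_t)(c->head - c->issued)) {
		/* driver: ioat2_set_chainaddr(ioat, tail desc's phys),
		 * then __ioat2_issue_pending(ioat) */
		c->dmacount += (uint16_t)(c->head - c->issued);
		c->issued = c->head;
		printf("resubmitted %u descriptors\n", c->dmacount);
	} else {
		printf("ring empty: priming with a null descriptor\n");
	}
}

int main(void)
{
	struct chan c = { .head = 7, .tail = 4, .issued = 7 };

	restart_chan(&c);
	return 0;
}
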
259 static void ioat2_restart_channel(struct ioat2_dma_chan *ioat) in ioat2_restart_channel() argument
261 struct ioat_chan_common *chan = &ioat->base; in ioat2_restart_channel()
266 __cleanup(ioat, phys_complete); in ioat2_restart_channel()
268 __ioat2_restart_chan(ioat); in ioat2_restart_channel()
271 static void check_active(struct ioat2_dma_chan *ioat) in check_active() argument
273 struct ioat_chan_common *chan = &ioat->base; in check_active()
275 if (ioat2_ring_active(ioat)) { in check_active()
282 else if (ioat->alloc_order > ioat_get_alloc_order()) { in check_active()
286 reshape_ring(ioat, ioat->alloc_order - 1); in check_active()
291 if (ioat->alloc_order > ioat_get_alloc_order()) in check_active()
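
check_active() implements the shrink half of the adaptive ring sizing: if the ring went idle while larger than the default order, it is stepped down one order per timer tick via reshape_ring(), and the timer is re-armed while it remains above the target. A model of that policy; the default order here is a stand-in for whatever ioat_get_alloc_order() returns:

#include <stdio.h>

static const int default_order = 8; /* stand-in for ioat_get_alloc_order() */

static int check_active(int active, int alloc_order)
{
	if (active)
		return alloc_order;  /* busy: leave the ring alone */
	if (alloc_order > default_order)
		alloc_order--;       /* reshape_ring(ioat, alloc_order - 1) */
	return alloc_order;          /* driver re-arms the timer if still above */
}

int main(void)
{
	int order = 10;              /* grown to 1024 slots under load */

	while (order > default_order) {
		order = check_active(0, order);
		printf("idle: shrunk to order %d\n", order);
	}
	return 0;
}
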
299 struct ioat2_dma_chan *ioat = to_ioat2_chan((void *) data); in ioat2_timer_event() local
300 struct ioat_chan_common *chan = &ioat->base; in ioat2_timer_event()
327 __cleanup(ioat, phys_complete); in ioat2_timer_event()
329 spin_lock_bh(&ioat->prep_lock); in ioat2_timer_event()
330 ioat2_restart_channel(ioat); in ioat2_timer_event()
331 spin_unlock_bh(&ioat->prep_lock); in ioat2_timer_event()
340 if (ioat2_ring_active(ioat)) in ioat2_timer_event()
343 spin_lock_bh(&ioat->prep_lock); in ioat2_timer_event()
344 check_active(ioat); in ioat2_timer_event()
345 spin_unlock_bh(&ioat->prep_lock); in ioat2_timer_event()
369 struct ioat2_dma_chan *ioat; in ioat2_enumerate_channels() local
395 ioat = devm_kzalloc(dev, sizeof(*ioat), GFP_KERNEL); in ioat2_enumerate_channels()
396 if (!ioat) in ioat2_enumerate_channels()
399 ioat_init_channel(device, &ioat->base, i); in ioat2_enumerate_channels()
400 ioat->xfercap_log = xfercap_log; in ioat2_enumerate_channels()
401 spin_lock_init(&ioat->prep_lock); in ioat2_enumerate_channels()
402 if (device->reset_hw(&ioat->base)) { in ioat2_enumerate_channels()
414 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ioat2_tx_submit_unlock() local
415 struct ioat_chan_common *chan = &ioat->base; in ioat2_tx_submit_unlock()
419 dev_dbg(to_dev(&ioat->base), "%s: cookie: %d\n", __func__, cookie); in ioat2_tx_submit_unlock()
430 ioat->head += ioat->produce; in ioat2_tx_submit_unlock()
432 ioat2_update_pending(ioat); in ioat2_tx_submit_unlock()
433 spin_unlock_bh(&ioat->prep_lock); in ioat2_tx_submit_unlock()
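
Submission is a two-phase, two-function protocol: ioat2_check_space_lock() takes prep_lock and records the reservation in ioat->produce, the prep routine fills those slots, and ioat2_tx_submit_unlock() publishes them (head += produce), calls ioat2_update_pending(), and only then drops the lock. A sketch of that handoff, using a pthread mutex as a stand-in for spin_lock_bh():

#include <stdio.h>
#include <stdint.h>
#include <pthread.h>

static pthread_mutex_t prep_lock = PTHREAD_MUTEX_INITIALIZER;
static uint16_t head, produce;

static int check_space_lock(uint16_t num_descs, uint16_t space)
{
	pthread_mutex_lock(&prep_lock); /* spin_lock_bh(&ioat->prep_lock) */
	if (space > num_descs) {
		produce = num_descs;    /* reservation made; lock stays held */
		return 0;
	}
	pthread_mutex_unlock(&prep_lock);
	return -1;                      /* driver grows the ring or returns -ENOMEM */
}

static void tx_submit_unlock(void)
{
	head += produce;                /* publish the filled descriptors */
	/* ioat2_update_pending(): doorbell once the backlog is big enough */
	pthread_mutex_unlock(&prep_lock);
}

int main(void)
{
	if (check_space_lock(2, 16) == 0) {
		/* ... caller fills the 2 reserved slots here ... */
		tx_submit_unlock();
	}
	printf("head=%u produce=%u\n", head, produce);
	return 0;
}
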
516 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ioat2_alloc_chan_resources() local
517 struct ioat_chan_common *chan = &ioat->base; in ioat2_alloc_chan_resources()
524 if (ioat->ring) in ioat2_alloc_chan_resources()
525 return 1 << ioat->alloc_order; in ioat2_alloc_chan_resources()
549 spin_lock_bh(&ioat->prep_lock); in ioat2_alloc_chan_resources()
550 ioat->ring = ring; in ioat2_alloc_chan_resources()
551 ioat->head = 0; in ioat2_alloc_chan_resources()
552 ioat->issued = 0; in ioat2_alloc_chan_resources()
553 ioat->tail = 0; in ioat2_alloc_chan_resources()
554 ioat->alloc_order = order; in ioat2_alloc_chan_resources()
556 spin_unlock_bh(&ioat->prep_lock); in ioat2_alloc_chan_resources()
559 ioat2_start_null_desc(ioat); in ioat2_alloc_chan_resources()
568 return 1 << ioat->alloc_order; in ioat2_alloc_chan_resources()
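
ioat2_alloc_chan_resources() is idempotent: an already-allocated ring is simply reported (the return value is the descriptor count, 1 << alloc_order); otherwise a ring of the default order is allocated, all three cursors are reset under prep_lock, and the engine is primed with a null descriptor. A sketch of that shape, with hypothetical helper names:

#include <stdlib.h>
#include <stdint.h>

struct slot { uint64_t phys; };

struct chan {
	struct slot **ring;
	uint16_t head, tail, issued;
	int alloc_order;
};

static int alloc_chan_resources(struct chan *c, int order)
{
	if (c->ring)
		return 1 << c->alloc_order; /* already set up: report size */

	c->ring = calloc((size_t)1 << order, sizeof(*c->ring));
	if (!c->ring)
		return -1;                  /* driver returns -ENOMEM */

	/* cursors are reset under prep_lock in the driver */
	c->head = c->tail = c->issued = 0;
	c->alloc_order = order;
	/* then ioat2_start_null_desc() primes the engine */
	return 1 << c->alloc_order;
}

int main(void)
{
	struct chan c = { 0 };

	return alloc_chan_resources(&c, 4) == 16 ? 0 : 1;
}
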
579 bool reshape_ring(struct ioat2_dma_chan *ioat, int order) in reshape_ring() argument
585 struct ioat_chan_common *chan = &ioat->base; in reshape_ring()
587 const u32 curr_size = ioat2_ring_size(ioat); in reshape_ring()
588 const u16 active = ioat2_ring_active(ioat); in reshape_ring()
615 u16 curr_idx = (ioat->tail+i) & (curr_size-1); in reshape_ring()
616 u16 new_idx = (ioat->tail+i) & (new_size-1); in reshape_ring()
618 ring[new_idx] = ioat->ring[curr_idx]; in reshape_ring()
624 u16 new_idx = (ioat->tail+i) & (new_size-1); in reshape_ring()
629 u16 new_idx = (ioat->tail+i) & (new_size-1); in reshape_ring()
641 u16 new_idx = (ioat->tail+i) & (new_size-1); in reshape_ring()
655 u16 curr_idx = (ioat->tail+i) & (curr_size-1); in reshape_ring()
656 u16 new_idx = (ioat->tail+i) & (new_size-1); in reshape_ring()
658 ring[new_idx] = ioat->ring[curr_idx]; in reshape_ring()
666 ent = ioat2_get_ring_ent(ioat, ioat->tail+i); in reshape_ring()
671 hw = ring[(ioat->tail+new_size-1) & (new_size-1)]->hw; in reshape_ring()
672 next = ring[(ioat->tail+new_size) & (new_size-1)]; in reshape_ring()
679 kfree(ioat->ring); in reshape_ring()
680 ioat->ring = ring; in reshape_ring()
681 ioat->alloc_order = order; in reshape_ring()
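
The heart of reshape_ring() is the index remapping: each live entry keeps its logical position (tail + i) while its physical slot changes with the power-of-two mask, so the active span survives the resize untouched, and entries that wrapped in the small ring unwrap in the larger one. The driver also re-links the hardware descriptors; this model shows just the remap, with illustrative types:

#include <stdio.h>
#include <stdint.h>

static void remap(int *dst, int new_size, const int *src, int curr_size,
		  uint16_t tail, uint16_t active)
{
	for (uint16_t i = 0; i < active; i++) {
		uint16_t curr_idx = (tail + i) & (curr_size - 1);
		uint16_t new_idx  = (tail + i) & (new_size - 1);

		dst[new_idx] = src[curr_idx]; /* same logical slot, new mask */
	}
}

int main(void)
{
	int old_ring[8] = { 0 }, new_ring[16] = { 0 };
	uint16_t tail = 6;

	old_ring[6] = 100; /* logical slot tail + 0 */
	old_ring[7] = 101; /* logical slot tail + 1 */
	old_ring[0] = 102; /* logical slot tail + 2, wrapped in the old ring */

	remap(new_ring, 16, old_ring, 8, tail, 3);
	printf("%d %d %d\n", new_ring[6], new_ring[7], new_ring[8]);
	return 0;
}
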
691 int ioat2_check_space_lock(struct ioat2_dma_chan *ioat, int num_descs) in ioat2_check_space_lock() argument
693 struct ioat_chan_common *chan = &ioat->base; in ioat2_check_space_lock()
697 spin_lock_bh(&ioat->prep_lock); in ioat2_check_space_lock()
702 if (likely(ioat2_ring_space(ioat) > num_descs)) { in ioat2_check_space_lock()
704 __func__, num_descs, ioat->head, ioat->tail, ioat->issued); in ioat2_check_space_lock()
705 ioat->produce = num_descs; in ioat2_check_space_lock()
709 spin_unlock_bh(&ioat->prep_lock); in ioat2_check_space_lock()
716 spin_lock_bh(&ioat->prep_lock); in ioat2_check_space_lock()
717 retry = reshape_ring(ioat, ioat->alloc_order + 1); in ioat2_check_space_lock()
719 spin_unlock_bh(&ioat->prep_lock); in ioat2_check_space_lock()
728 __func__, num_descs, ioat->head, ioat->tail, ioat->issued); in ioat2_check_space_lock()
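
ioat2_check_space_lock() also hides the grow path: on a full ring it drops prep_lock, reacquires it, grows the ring one order via reshape_ring(), and retries, failing only at the maximum order. Note the strict '>' in the space test: it keeps one slot permanently free, which is what lets a masked head - tail count distinguish a full ring from an empty one. A compressed model of the retry loop (the real code bounces the lock between attempts); MAX_ORDER here stands in for the driver's cap:

#include <stdio.h>

#define MAX_ORDER 16 /* stand-in for the driver's maximum ring order */

static int space(int order, int active) { return (1 << order) - active; }

static int check_space(int *order, int active, int num_descs)
{
	for (;;) {
		if (space(*order, active) > num_descs)
			return 0;  /* reserved; prep_lock stays held */
		if (*order >= MAX_ORDER)
			return -1; /* -ENOMEM in the driver */
		(*order)++;        /* reshape_ring(ioat, ioat->alloc_order + 1) */
		printf("grew ring to order %d\n", *order);
	}
}

int main(void)
{
	int order = 4;                     /* 16 slots */

	return check_space(&order, 14, 4); /* forces one grow, to order 5 */
}
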
749 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ioat2_dma_prep_memcpy_lock() local
757 num_descs = ioat2_xferlen_to_descs(ioat, len); in ioat2_dma_prep_memcpy_lock()
758 if (likely(num_descs) && ioat2_check_space_lock(ioat, num_descs) == 0) in ioat2_dma_prep_memcpy_lock()
759 idx = ioat->head; in ioat2_dma_prep_memcpy_lock()
764 size_t copy = min_t(size_t, len, 1 << ioat->xfercap_log); in ioat2_dma_prep_memcpy_lock()
766 desc = ioat2_get_ring_ent(ioat, idx + i); in ioat2_dma_prep_memcpy_lock()
777 dump_desc_dbg(ioat, desc); in ioat2_dma_prep_memcpy_lock()
785 dump_desc_dbg(ioat, desc); in ioat2_dma_prep_memcpy_lock()
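
ioat2_dma_prep_memcpy_lock() splits a copy that exceeds the per-descriptor cap (1 << xfercap_log) across consecutive ring slots, advancing the source and destination addresses by each chunk; only the final descriptor carries the completion/interrupt flags, so one callback covers the whole request. A standalone model of the chunking; the addresses and cap are made up for illustration:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	unsigned int xfercap_log = 20;  /* pretend 1 MiB per-descriptor cap */
	size_t len = (3u << 20) + 4096; /* a 3 MiB + 4 KiB request */
	size_t cap = (size_t)1 << xfercap_log;
	uint64_t src = 0x10000000, dst = 0x20000000;
	int i = 0;

	while (len) {
		size_t copy = len < cap ? len : cap; /* min_t(size_t, len, cap) */

		printf("desc %d: src=%#llx dst=%#llx size=%zu%s\n", i,
		       (unsigned long long)src, (unsigned long long)dst,
		       copy, len == copy ? " [completion flags]" : "");
		src += copy;
		dst += copy;
		len -= copy;
		i++;
	}
	return 0;
}
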
797 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ioat2_free_chan_resources() local
798 struct ioat_chan_common *chan = &ioat->base; in ioat2_free_chan_resources()
801 const u16 total_descs = 1 << ioat->alloc_order; in ioat2_free_chan_resources()
808 if (!ioat->ring) in ioat2_free_chan_resources()
815 spin_lock_bh(&ioat->prep_lock); in ioat2_free_chan_resources()
816 descs = ioat2_ring_space(ioat); in ioat2_free_chan_resources()
819 desc = ioat2_get_ring_ent(ioat, ioat->head + i); in ioat2_free_chan_resources()
828 desc = ioat2_get_ring_ent(ioat, ioat->tail + i); in ioat2_free_chan_resources()
829 dump_desc_dbg(ioat, desc); in ioat2_free_chan_resources()
833 kfree(ioat->ring); in ioat2_free_chan_resources()
834 ioat->ring = NULL; in ioat2_free_chan_resources()
835 ioat->alloc_order = 0; in ioat2_free_chan_resources()
838 spin_unlock_bh(&ioat->prep_lock); in ioat2_free_chan_resources()
843 ioat->dmacount = 0; in ioat2_free_chan_resources()
848 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ring_size_show() local
850 return sprintf(page, "%d\n", (1 << ioat->alloc_order) & ~1); in ring_size_show()
856 struct ioat2_dma_chan *ioat = to_ioat2_chan(c); in ring_active_show() local
859 return sprintf(page, "%d\n", ioat2_ring_active(ioat)); in ring_active_show()
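
A note on the '& ~1' in ring_size_show(): ioat2_free_chan_resources() resets alloc_order to 0, and 1 << 0 would make a torn-down channel report a phantom one-slot ring, so bit 0 is cleared to read 0 instead; every real ring size is a larger power of two and passes through unchanged. This reading of the mask's intent is an inference, but the arithmetic is easy to check:

#include <stdio.h>

int main(void)
{
	printf("%d\n", (1 << 0) & ~1); /* freed channel (order 0): reads 0 */
	printf("%d\n", (1 << 8) & ~1); /* order-8 ring: 256, unchanged */
	return 0;
}
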