Lines Matching refs:ioat
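(Cross-reference hits for the identifier ioat in the ioat1 channel code of what appears to be the Linux I/OAT DMA engine driver; each hit is quoted with its source line number and enclosing function, and the trailing "local"/"argument" words mark variable and parameter declarations.)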

47 static void ioat1_cleanup(struct ioat_dma_chan *ioat);
48 static void ioat1_dma_start_null_desc(struct ioat_dma_chan *ioat);
128 struct ioat_dma_chan *ioat; in ioat1_enumerate_channels() local
150 ioat = devm_kzalloc(dev, sizeof(*ioat), GFP_KERNEL); in ioat1_enumerate_channels()
151 if (!ioat) in ioat1_enumerate_channels()
154 ioat_init_channel(device, &ioat->base, i); in ioat1_enumerate_channels()
155 ioat->xfercap = xfercap; in ioat1_enumerate_channels()
156 spin_lock_init(&ioat->desc_lock); in ioat1_enumerate_channels()
157 INIT_LIST_HEAD(&ioat->free_desc); in ioat1_enumerate_channels()
158 INIT_LIST_HEAD(&ioat->used_desc); in ioat1_enumerate_channels()
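The 128-158 hits are the per-channel setup in ioat1_enumerate_channels(). A minimal sketch of the loop body they imply, assuming a loop bound and error handling not shown in the listing:

    for (i = 0; i < dma->chancnt; i++) {        /* loop bound assumed */
            struct ioat_dma_chan *ioat;

            ioat = devm_kzalloc(dev, sizeof(*ioat), GFP_KERNEL);
            if (!ioat)
                    break;                      /* assumed: stop on failure */

            ioat_init_channel(device, &ioat->base, i);
            ioat->xfercap = xfercap;            /* max bytes per hw descriptor */
            spin_lock_init(&ioat->desc_lock);   /* guards free_desc/used_desc */
            INIT_LIST_HEAD(&ioat->free_desc);
            INIT_LIST_HEAD(&ioat->used_desc);
    }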
170 __ioat1_dma_memcpy_issue_pending(struct ioat_dma_chan *ioat) in __ioat1_dma_memcpy_issue_pending() argument
172 void __iomem *reg_base = ioat->base.reg_base; in __ioat1_dma_memcpy_issue_pending()
174 dev_dbg(to_dev(&ioat->base), "%s: pending: %d\n", in __ioat1_dma_memcpy_issue_pending()
175 __func__, ioat->pending); in __ioat1_dma_memcpy_issue_pending()
176 ioat->pending = 0; in __ioat1_dma_memcpy_issue_pending()
182 struct ioat_dma_chan *ioat = to_ioat_chan(chan); in ioat1_dma_memcpy_issue_pending() local
184 if (ioat->pending > 0) { in ioat1_dma_memcpy_issue_pending()
185 spin_lock_bh(&ioat->desc_lock); in ioat1_dma_memcpy_issue_pending()
186 __ioat1_dma_memcpy_issue_pending(ioat); in ioat1_dma_memcpy_issue_pending()
187 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_memcpy_issue_pending()
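The 170-187 hits show the kernel's usual locked/unlocked pairing: the double-underscore helper requires desc_lock to be held, and the exported entry point takes the lock around it. Reconstructed below; the doorbell write elided between lines 176 and 182 is only noted, not guessed:

    /* caller must hold ioat->desc_lock */
    static void __ioat1_dma_memcpy_issue_pending(struct ioat_dma_chan *ioat)
    {
            void __iomem *reg_base = ioat->base.reg_base;

            dev_dbg(to_dev(&ioat->base), "%s: pending: %d\n",
                    __func__, ioat->pending);
            ioat->pending = 0;
            /* the actual doorbell write to reg_base is elided
             * in the listing */
    }

    static void ioat1_dma_memcpy_issue_pending(struct dma_chan *chan)
    {
            struct ioat_dma_chan *ioat = to_ioat_chan(chan);

            if (ioat->pending > 0) {
                    spin_lock_bh(&ioat->desc_lock);
                    __ioat1_dma_memcpy_issue_pending(ioat);
                    spin_unlock_bh(&ioat->desc_lock);
            }
    }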
195 static void ioat1_reset_channel(struct ioat_dma_chan *ioat) in ioat1_reset_channel() argument
197 struct ioat_chan_common *chan = &ioat->base; in ioat1_reset_channel()
219 ioat->pending = INT_MIN; in ioat1_reset_channel()
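Line 219 carries real reasoning: during a reset, pending is parked at INT_MIN so the "pending > 0" test in ioat1_dma_memcpy_issue_pending() stays false and nothing rings the doorbell until the channel is restarted. An inference from the two fragments above:

    ioat->pending = INT_MIN;    /* reset in flight: keeps issue_pending()
                                 * from kicking the hardware */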
229 struct ioat_dma_chan *ioat = to_ioat_chan(c); in ioat1_tx_submit() local
231 struct ioat_chan_common *chan = &ioat->base; in ioat1_tx_submit()
236 spin_lock_bh(&ioat->desc_lock); in ioat1_tx_submit()
239 dev_dbg(to_dev(&ioat->base), "%s: cookie: %d\n", __func__, cookie); in ioat1_tx_submit()
243 chain_tail = to_ioat_desc(ioat->used_desc.prev); in ioat1_tx_submit()
247 list_splice_tail_init(&desc->tx_list, &ioat->used_desc); in ioat1_tx_submit()
248 dump_desc_dbg(ioat, chain_tail); in ioat1_tx_submit()
249 dump_desc_dbg(ioat, first); in ioat1_tx_submit()
254 ioat->active += desc->hw->tx_cnt; in ioat1_tx_submit()
255 ioat->pending += desc->hw->tx_cnt; in ioat1_tx_submit()
256 if (ioat->pending >= ioat_pending_level) in ioat1_tx_submit()
257 __ioat1_dma_memcpy_issue_pending(ioat); in ioat1_tx_submit()
258 spin_unlock_bh(&ioat->desc_lock); in ioat1_tx_submit()
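ioat1_tx_submit() (229-258) appends a prepared chain under desc_lock: assign a cookie, hardware-link the new chain after the current tail of used_desc, splice the software list in, bump the counters, and ring the doorbell once enough work has queued. A hedged reconstruction; the cookie helper and the hw next-pointer field are assumptions:

    spin_lock_bh(&ioat->desc_lock);
    cookie = dma_cookie_assign(tx);                    /* helper assumed */
    dev_dbg(to_dev(&ioat->base), "%s: cookie: %d\n", __func__, cookie);

    chain_tail = to_ioat_desc(ioat->used_desc.prev);
    chain_tail->hw->next = first->txd.phys;            /* hw link field assumed */
    list_splice_tail_init(&desc->tx_list, &ioat->used_desc);

    ioat->active += desc->hw->tx_cnt;
    ioat->pending += desc->hw->tx_cnt;
    if (ioat->pending >= ioat_pending_level)
            __ioat1_dma_memcpy_issue_pending(ioat);    /* lock already held */
    spin_unlock_bh(&ioat->desc_lock);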
269 ioat_dma_alloc_descriptor(struct ioat_dma_chan *ioat, gfp_t flags) in ioat_dma_alloc_descriptor() argument
276 ioatdma_device = ioat->base.device; in ioat_dma_alloc_descriptor()
290 dma_async_tx_descriptor_init(&desc_sw->txd, &ioat->base.common); in ioat_dma_alloc_descriptor()
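ioat_dma_alloc_descriptor() (269-290) pairs one hardware descriptor from the device's DMA pool with a software bookkeeping struct and initializes the generic dmaengine descriptor. The allocation between lines 276 and 290 is elided; a plausible shape, assuming a dma_pool member and these type/field names:

    struct ioat_dma_descriptor *desc;   /* hw descriptor type assumed */
    struct ioat_desc_sw *desc_sw;
    dma_addr_t phys;

    ioatdma_device = ioat->base.device;
    desc = dma_pool_alloc(ioatdma_device->dma_pool, flags, &phys); /* pool assumed */
    if (!desc)
            return NULL;

    desc_sw = kzalloc(sizeof(*desc_sw), flags);        /* sw alloc assumed */
    if (!desc_sw) {
            dma_pool_free(ioatdma_device->dma_pool, desc, phys);
            return NULL;
    }

    desc_sw->hw = desc;
    dma_async_tx_descriptor_init(&desc_sw->txd, &ioat->base.common);
    desc_sw->txd.phys = phys;   /* assumed: remember hw address for chaining */
    return desc_sw;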
309 struct ioat_dma_chan *ioat = to_ioat_chan(c); in ioat1_dma_alloc_chan_resources() local
310 struct ioat_chan_common *chan = &ioat->base; in ioat1_dma_alloc_chan_resources()
317 if (!list_empty(&ioat->free_desc)) in ioat1_dma_alloc_chan_resources()
318 return ioat->desccount; in ioat1_dma_alloc_chan_resources()
331 desc = ioat_dma_alloc_descriptor(ioat, GFP_KERNEL); in ioat1_dma_alloc_chan_resources()
339 spin_lock_bh(&ioat->desc_lock); in ioat1_dma_alloc_chan_resources()
340 ioat->desccount = i; in ioat1_dma_alloc_chan_resources()
341 list_splice(&tmp_list, &ioat->free_desc); in ioat1_dma_alloc_chan_resources()
342 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_alloc_chan_resources()
355 ioat1_dma_start_null_desc(ioat); /* give chain to dma device */ in ioat1_dma_alloc_chan_resources()
357 __func__, ioat->desccount); in ioat1_dma_alloc_chan_resources()
358 return ioat->desccount; in ioat1_dma_alloc_chan_resources()
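ioat1_dma_alloc_chan_resources() (309-358) preallocates the descriptor pool: return early if free_desc is already populated, fill a temporary list with GFP_KERNEL allocations, splice it in under the lock, then prime the hardware with a NULL descriptor. A sketch of the loop; the loop bound is an assumption:

    if (!list_empty(&ioat->free_desc))
            return ioat->desccount;            /* already initialized */

    for (i = 0; i < ioat_initial_desc_count; i++) {    /* bound assumed */
            desc = ioat_dma_alloc_descriptor(ioat, GFP_KERNEL);
            if (!desc)
                    break;                     /* keep what we got */
            list_add_tail(&desc->node, &tmp_list);
    }

    spin_lock_bh(&ioat->desc_lock);
    ioat->desccount = i;
    list_splice(&tmp_list, &ioat->free_desc);
    spin_unlock_bh(&ioat->desc_lock);

    ioat1_dma_start_null_desc(ioat);           /* give chain to dma device */
    return ioat->desccount;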
403 struct ioat_dma_chan *ioat = to_ioat_chan(c); in ioat1_dma_free_chan_resources() local
404 struct ioat_chan_common *chan = &ioat->base; in ioat1_dma_free_chan_resources()
412 if (ioat->desccount == 0) in ioat1_dma_free_chan_resources()
424 spin_lock_bh(&ioat->desc_lock); in ioat1_dma_free_chan_resources()
425 list_for_each_entry_safe(desc, _desc, &ioat->used_desc, node) { in ioat1_dma_free_chan_resources()
428 dump_desc_dbg(ioat, desc); in ioat1_dma_free_chan_resources()
436 &ioat->free_desc, node) { in ioat1_dma_free_chan_resources()
442 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_free_chan_resources()
455 ioat->pending = 0; in ioat1_dma_free_chan_resources()
456 ioat->desccount = 0; in ioat1_dma_free_chan_resources()
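ioat1_dma_free_chan_resources() (403-456) is the mirror image: bail out if nothing was allocated (desccount == 0), drain both lists under the lock, return each hw descriptor to the pool, and zero the counters. Sketched below, with the pool-free and sw-free calls assumed to match the allocation sketch above:

    spin_lock_bh(&ioat->desc_lock);
    list_for_each_entry_safe(desc, _desc, &ioat->used_desc, node) {
            dump_desc_dbg(ioat, desc);
            list_del(&desc->node);
            dma_pool_free(ioatdma_device->dma_pool, desc->hw, desc->txd.phys);
            kfree(desc);
    }
    list_for_each_entry_safe(desc, _desc, &ioat->free_desc, node) {
            list_del(&desc->node);
            dma_pool_free(ioatdma_device->dma_pool, desc->hw, desc->txd.phys);
            kfree(desc);
    }
    spin_unlock_bh(&ioat->desc_lock);

    ioat->pending = 0;
    ioat->desccount = 0;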
468 ioat1_dma_get_next_descriptor(struct ioat_dma_chan *ioat) in ioat1_dma_get_next_descriptor() argument
472 if (!list_empty(&ioat->free_desc)) { in ioat1_dma_get_next_descriptor()
473 new = to_ioat_desc(ioat->free_desc.next); in ioat1_dma_get_next_descriptor()
477 new = ioat_dma_alloc_descriptor(ioat, GFP_ATOMIC); in ioat1_dma_get_next_descriptor()
479 dev_err(to_dev(&ioat->base), "alloc failed\n"); in ioat1_dma_get_next_descriptor()
483 dev_dbg(to_dev(&ioat->base), "%s: allocated: %d\n", in ioat1_dma_get_next_descriptor()
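ioat1_dma_get_next_descriptor() (468-483) pops from the free list when it can and allocates when it must. Because every caller in this listing already holds desc_lock (a spinlock), the fallback has to be GFP_ATOMIC. Reconstructed, with the list_del and the debug format argument as assumptions:

    struct ioat_desc_sw *new;

    if (!list_empty(&ioat->free_desc)) {
            new = to_ioat_desc(ioat->free_desc.next);
            list_del(&new->node);              /* assumed: pop from free list */
    } else {
            /* desc_lock held -> must not sleep */
            new = ioat_dma_alloc_descriptor(ioat, GFP_ATOMIC);
            if (!new) {
                    dev_err(to_dev(&ioat->base), "alloc failed\n");
                    return NULL;
            }
    }
    dev_dbg(to_dev(&ioat->base), "%s: allocated: %d\n",
            __func__, new->txd.cookie);        /* format arg assumed */
    return new;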
493 struct ioat_dma_chan *ioat = to_ioat_chan(c); in ioat1_dma_prep_memcpy() local
503 spin_lock_bh(&ioat->desc_lock); in ioat1_dma_prep_memcpy()
504 desc = ioat1_dma_get_next_descriptor(ioat); in ioat1_dma_prep_memcpy()
510 copy = min_t(size_t, len, ioat->xfercap); in ioat1_dma_prep_memcpy()
527 next = ioat1_dma_get_next_descriptor(ioat); in ioat1_dma_prep_memcpy()
529 dump_desc_dbg(ioat, desc); in ioat1_dma_prep_memcpy()
536 struct ioat_chan_common *chan = &ioat->base; in ioat1_dma_prep_memcpy()
540 list_splice(&chain, &ioat->free_desc); in ioat1_dma_prep_memcpy()
541 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_prep_memcpy()
544 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_prep_memcpy()
552 dump_desc_dbg(ioat, desc); in ioat1_dma_prep_memcpy()
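ioat1_dma_prep_memcpy() (493-552) splits one copy request into a chain of descriptors of at most ioat->xfercap bytes each, all under desc_lock; on a mid-chain allocation failure the partial chain is spliced back onto free_desc (lines 536-544). A sketch of the chunking loop, with the hw field names assumed:

    spin_lock_bh(&ioat->desc_lock);
    desc = ioat1_dma_get_next_descriptor(ioat);
    if (!desc)
            goto err;                          /* unwind path per 536-544 */

    do {
            copy = min_t(size_t, len, ioat->xfercap);

            desc->hw->size = copy;             /* hw fields assumed */
            desc->hw->src_addr = dma_src;
            desc->hw->dst_addr = dma_dest;

            len -= copy;
            dma_src += copy;
            dma_dest += copy;

            if (len) {
                    next = ioat1_dma_get_next_descriptor(ioat);
                    if (!next)
                            goto err;
                    /* hw- and sw-link desc -> next (link fields assumed) */
                    desc = next;
            }
    } while (len);
    spin_unlock_bh(&ioat->desc_lock);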
559 struct ioat_dma_chan *ioat = to_ioat_chan((void *) data); in ioat1_cleanup_event() local
560 struct ioat_chan_common *chan = &ioat->base; in ioat1_cleanup_event()
562 ioat1_cleanup(ioat); in ioat1_cleanup_event()
565 writew(IOAT_CHANCTRL_RUN, ioat->base.reg_base + IOAT_CHANCTRL_OFFSET); in ioat1_cleanup_event()
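ioat1_cleanup_event() (559-565) is the completion tasklet body: run the cleanup pass, then re-enable the channel by writing IOAT_CHANCTRL_RUN. Whatever sits between lines 562 and 565 is elided; the teardown guard below is an assumption:

    static void ioat1_cleanup_event(unsigned long data)
    {
            struct ioat_dma_chan *ioat = to_ioat_chan((void *) data);

            ioat1_cleanup(ioat);
            if (!test_bit(IOAT_RUN, &ioat->base.state))   /* guard assumed */
                    return;
            writew(IOAT_CHANCTRL_RUN,
                   ioat->base.reg_base + IOAT_CHANCTRL_OFFSET);
    }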
602 static void __cleanup(struct ioat_dma_chan *ioat, dma_addr_t phys_complete) in __cleanup() argument
604 struct ioat_chan_common *chan = &ioat->base; in __cleanup()
610 list_for_each_safe(_desc, n, &ioat->used_desc) { in __cleanup()
621 dump_desc_dbg(ioat, desc); in __cleanup()
625 ioat->active -= desc->hw->tx_cnt; in __cleanup()
638 list_move_tail(&desc->node, &ioat->free_desc); in __cleanup()
648 if (n == &ioat->used_desc) { in __cleanup()
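__cleanup() (602-648) retires completed work: it walks used_desc from the head, completes and unaccounts (active -= tx_cnt) each finished descriptor, and recycles it onto free_desc, stopping at the descriptor whose physical address matches phys_complete; that last one stays on used_desc so new submissions can chain after it, and the n == &ioat->used_desc test at 648 detects that the whole list was consumed. A hedged skeleton:

    list_for_each_safe(_desc, n, &ioat->used_desc) {
            struct ioat_desc_sw *desc = list_entry(_desc, typeof(*desc), node);

            dump_desc_dbg(ioat, desc);
            if (desc->txd.cookie) {
                    /* only the last descriptor of a request carries a cookie */
                    dma_cookie_complete(&desc->txd);   /* helper assumed */
                    ioat->active -= desc->hw->tx_cnt;
            }

            if (desc->txd.phys != phys_complete) {
                    /* hardware is already past this one: recycle it */
                    list_move_tail(&desc->node, &ioat->free_desc);
            } else {
                    /* last completed descriptor: keep it as the chain
                     * anchor for future appends, then stop */
                    break;
            }
    }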
671 static void ioat1_cleanup(struct ioat_dma_chan *ioat) in ioat1_cleanup() argument
673 struct ioat_chan_common *chan = &ioat->base; in ioat1_cleanup()
686 if (!spin_trylock_bh(&ioat->desc_lock)) { in ioat1_cleanup()
691 __cleanup(ioat, phys_complete); in ioat1_cleanup()
693 spin_unlock_bh(&ioat->desc_lock); in ioat1_cleanup()
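ioat1_cleanup() (671-693) is the non-blocking entry point: if desc_lock is contended it gives up rather than spin in the completion path. The retry mechanism between lines 686 and 691 is elided; rearming the channel timer is a plausible assumption:

    if (!spin_trylock_bh(&ioat->desc_lock)) {
            /* contended: let the timer retry instead of spinning here */
            mod_timer(&chan->timer, jiffies + COMPLETION_TIMEOUT);  /* assumed */
            return;
    }
    __cleanup(ioat, phys_complete);
    spin_unlock_bh(&ioat->desc_lock);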
699 struct ioat_dma_chan *ioat = to_ioat_chan((void *) data); in ioat1_timer_event() local
700 struct ioat_chan_common *chan = &ioat->base; in ioat1_timer_event()
708 spin_lock_bh(&ioat->desc_lock); in ioat1_timer_event()
711 desc = to_ioat_desc(ioat->used_desc.prev); in ioat1_timer_event()
712 ioat_set_chainaddr(ioat, desc->txd.phys); in ioat1_timer_event()
715 ioat->pending = 0; in ioat1_timer_event()
718 spin_unlock_bh(&ioat->desc_lock); in ioat1_timer_event()
722 spin_lock_bh(&ioat->desc_lock); in ioat1_timer_event()
728 __cleanup(ioat, phys_complete); in ioat1_timer_event()
730 ioat1_reset_channel(ioat); in ioat1_timer_event()
741 spin_unlock_bh(&ioat->desc_lock); in ioat1_timer_event()
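ioat1_timer_event() (699-741) covers two stall cases. If work was queued but never kicked, it reloads the hardware chain address from the tail of used_desc and clears pending (708-718). If a completion deadline expired, it first attempts a normal cleanup pass and only resets the channel when no forward progress is found (722-741). A sketch of the second case; the progress check is an assumption:

    spin_lock_bh(&ioat->desc_lock);
    if (ioat_cleanup_preamble(chan, &phys_complete))   /* progress test assumed */
            __cleanup(ioat, phys_complete);            /* it moved: just reap */
    else
            ioat1_reset_channel(ioat);                 /* hung: reset */
    spin_unlock_bh(&ioat->desc_lock);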
763 static void ioat1_dma_start_null_desc(struct ioat_dma_chan *ioat) in ioat1_dma_start_null_desc() argument
765 struct ioat_chan_common *chan = &ioat->base; in ioat1_dma_start_null_desc()
769 spin_lock_bh(&ioat->desc_lock); in ioat1_dma_start_null_desc()
771 desc = ioat1_dma_get_next_descriptor(ioat); in ioat1_dma_start_null_desc()
776 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_start_null_desc()
791 list_add_tail(&desc->node, &ioat->used_desc); in ioat1_dma_start_null_desc()
792 dump_desc_dbg(ioat, desc); in ioat1_dma_start_null_desc()
794 ioat_set_chainaddr(ioat, desc->txd.phys); in ioat1_dma_start_null_desc()
796 spin_unlock_bh(&ioat->desc_lock); in ioat1_dma_start_null_desc()
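ioat1_dma_start_null_desc() (763-796) primes a freshly initialized channel: grab a descriptor, mark it as a NULL (no-data) transfer, put it on used_desc so later submissions have a chain tail to link after, point the hardware's chain address at it, and start the channel. The descriptor setup between lines 771 and 791 and the final start command are elided; a sketch with those parts marked:

    desc = ioat1_dma_get_next_descriptor(ioat);
    if (!desc) {
            spin_unlock_bh(&ioat->desc_lock);
            return;                            /* error message assumed */
    }

    /* assumed: flag the hw descriptor as a NULL transfer and
     * terminate its next pointer */
    desc->hw->ctl_f.null = 1;

    list_add_tail(&desc->node, &ioat->used_desc);
    dump_desc_dbg(ioat, desc);

    ioat_set_chainaddr(ioat, desc->txd.phys);
    /* channel start command write elided in the listing */
    spin_unlock_bh(&ioat->desc_lock);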
1096 struct ioat_dma_chan *ioat = to_ioat_chan(c); in ring_size_show() local
1098 return sprintf(page, "%d\n", ioat->desccount); in ring_size_show()
1104 struct ioat_dma_chan *ioat = to_ioat_chan(c); in ring_active_show() local
1106 return sprintf(page, "%d\n", ioat->active); in ring_active_show()