Lines matching refs: ioat_chan

40 static void ioat_eh(struct ioatdma_chan *ioat_chan);
50 struct ioatdma_chan *ioat_chan; in ioat_dma_do_interrupt() local
67 ioat_chan = ioat_chan_by_index(instance, bit); in ioat_dma_do_interrupt()
68 if (test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_dma_do_interrupt()
69 tasklet_schedule(&ioat_chan->cleanup_task); in ioat_dma_do_interrupt()
83 struct ioatdma_chan *ioat_chan = data; in ioat_dma_do_interrupt_msix() local
85 if (test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_dma_do_interrupt_msix()
86 tasklet_schedule(&ioat_chan->cleanup_task); in ioat_dma_do_interrupt_msix()
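Both interrupt paths above (the INTx fan-out in ioat_dma_do_interrupt() and the per-channel MSI-X handler) defer all real work: in hardirq context they only test the channel's IOAT_RUN bit and, if the channel is live, schedule its cleanup tasklet. A schematic kernel-style sketch of that idiom; the my_* names are illustrative stand-ins, not the driver's API:

```c
#include <linux/interrupt.h>

/* Hardirq half: do the minimum, defer everything else to softirq.
 * struct my_chan stands in for struct ioatdma_chan. */
static irqreturn_t my_irq_handler(int irq, void *data)
{
	struct my_chan *chan = data;	/* per-MSI-X-vector cookie */

	/* Teardown (see ioat_stop below) clears this bit first, so a
	 * late interrupt cannot re-schedule the dying tasklet. */
	if (test_bit(MY_RUN, &chan->state))
		tasklet_schedule(&chan->cleanup_task);

	return IRQ_HANDLED;
}
```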
91 void ioat_stop(struct ioatdma_chan *ioat_chan) in ioat_stop() argument
93 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_stop()
95 int chan_id = chan_num(ioat_chan); in ioat_stop()
101 clear_bit(IOAT_RUN, &ioat_chan->state); in ioat_stop()
118 del_timer_sync(&ioat_chan->timer); in ioat_stop()
121 tasklet_kill(&ioat_chan->cleanup_task); in ioat_stop()
124 ioat_cleanup_event((unsigned long)&ioat_chan->dma_chan); in ioat_stop()
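ioat_stop() shows the canonical teardown ordering: first make sure nothing can schedule new deferred work, then synchronously retire the timer and tasklet, then reap whatever completed in the meantime. A hedged sketch of that ordering; the IRQ masking the real function does between lines 95 and 118 is summarized as a comment:

```c
/* Schematic teardown; my_* names are assumptions, not driver API. */
void my_stop(struct my_chan *chan)
{
	/* 1. No new work: the IRQ handlers test this bit before
	 *    scheduling the cleanup tasklet. */
	clear_bit(MY_RUN, &chan->state);

	/* 2. (The real ioat_stop() masks or frees the MSI-X/INTx
	 *    source here, so no handler races the steps below.) */

	/* 3. Wait out a concurrently running timer callback. */
	del_timer_sync(&chan->timer);

	/* 4. Wait out a queued or running cleanup tasklet. */
	tasklet_kill(&chan->cleanup_task);

	/* 5. One final manual pass to flush remaining completions. */
	my_cleanup_event((unsigned long)&chan->dma_chan);
}
```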
127 static void __ioat_issue_pending(struct ioatdma_chan *ioat_chan) in __ioat_issue_pending() argument
129 ioat_chan->dmacount += ioat_ring_pending(ioat_chan); in __ioat_issue_pending()
130 ioat_chan->issued = ioat_chan->head; in __ioat_issue_pending()
131 writew(ioat_chan->dmacount, in __ioat_issue_pending()
132 ioat_chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET); in __ioat_issue_pending()
133 dev_dbg(to_dev(ioat_chan), in __ioat_issue_pending()
135 __func__, ioat_chan->head, ioat_chan->tail, in __ioat_issue_pending()
136 ioat_chan->issued, ioat_chan->dmacount); in __ioat_issue_pending()
141 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_issue_pending() local
143 if (ioat_ring_pending(ioat_chan)) { in ioat_issue_pending()
144 spin_lock_bh(&ioat_chan->prep_lock); in ioat_issue_pending()
145 __ioat_issue_pending(ioat_chan); in ioat_issue_pending()
146 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_issue_pending()
157 static void ioat_update_pending(struct ioatdma_chan *ioat_chan) in ioat_update_pending() argument
159 if (ioat_ring_pending(ioat_chan) > ioat_pending_level) in ioat_update_pending()
160 __ioat_issue_pending(ioat_chan); in ioat_update_pending()
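__ioat_issue_pending() is the doorbell: it folds the software-side pending count into dmacount and writes it to the DMACOUNT register, while ioat_update_pending() only rings that doorbell once the backlog exceeds ioat_pending_level. A minimal sketch of the batching, with assumed my_* names:

```c
/* Batch MMIO doorbell writes: cheap software bookkeeping per submit,
 * one register write per flush. Schematic only. */
static void my_issue_pending_locked(struct my_chan *chan)
{
	chan->dmacount += my_ring_pending(chan);
	chan->issued = chan->head;	/* everything up to head is now
					 * visible to the hardware */
	writew(chan->dmacount, chan->reg_base + MY_DMACOUNT_OFFSET);
}

static void my_update_pending(struct my_chan *chan)
{
	/* Amortize the MMIO write over several submissions. */
	if (my_ring_pending(chan) > my_pending_level)
		my_issue_pending_locked(chan);
}
```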
163 static void __ioat_start_null_desc(struct ioatdma_chan *ioat_chan) in __ioat_start_null_desc() argument
168 if (ioat_ring_space(ioat_chan) < 1) { in __ioat_start_null_desc()
169 dev_err(to_dev(ioat_chan), in __ioat_start_null_desc()
174 dev_dbg(to_dev(ioat_chan), in __ioat_start_null_desc()
176 __func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued); in __ioat_start_null_desc()
177 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head); in __ioat_start_null_desc()
189 ioat_set_chainaddr(ioat_chan, desc->txd.phys); in __ioat_start_null_desc()
190 dump_desc_dbg(ioat_chan, desc); in __ioat_start_null_desc()
193 ioat_chan->head += 1; in __ioat_start_null_desc()
194 __ioat_issue_pending(ioat_chan); in __ioat_start_null_desc()
197 void ioat_start_null_desc(struct ioatdma_chan *ioat_chan) in ioat_start_null_desc() argument
199 spin_lock_bh(&ioat_chan->prep_lock); in ioat_start_null_desc()
200 if (!test_bit(IOAT_CHAN_DOWN, &ioat_chan->state)) in ioat_start_null_desc()
201 __ioat_start_null_desc(ioat_chan); in ioat_start_null_desc()
202 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_start_null_desc()
205 static void __ioat_restart_chan(struct ioatdma_chan *ioat_chan) in __ioat_restart_chan() argument
208 ioat_chan->issued = ioat_chan->tail; in __ioat_restart_chan()
209 ioat_chan->dmacount = 0; in __ioat_restart_chan()
210 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in __ioat_restart_chan()
212 dev_dbg(to_dev(ioat_chan), in __ioat_restart_chan()
214 __func__, ioat_chan->head, ioat_chan->tail, in __ioat_restart_chan()
215 ioat_chan->issued, ioat_chan->dmacount); in __ioat_restart_chan()
217 if (ioat_ring_pending(ioat_chan)) { in __ioat_restart_chan()
220 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); in __ioat_restart_chan()
221 ioat_set_chainaddr(ioat_chan, desc->txd.phys); in __ioat_restart_chan()
222 __ioat_issue_pending(ioat_chan); in __ioat_restart_chan()
224 __ioat_start_null_desc(ioat_chan); in __ioat_restart_chan()
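__ioat_restart_chan() rewinds software state to the oldest unfinished descriptor and re-arms the hardware: if work is still queued it points the chain-address register at the tail descriptor and re-issues, otherwise it kicks the channel with a NULL descriptor. A sketch under assumed names:

```c
static void my_restart_chan(struct my_chan *chan)
{
	/* Nothing is in flight after quiesce/reset: resync counters. */
	chan->issued = chan->tail;
	chan->dmacount = 0;
	mod_timer(&chan->timer, jiffies + MY_COMPLETION_TIMEOUT);

	if (my_ring_pending(chan)) {
		struct my_ring_ent *desc =
			my_get_ring_ent(chan, chan->tail);

		/* Resume execution at the oldest unfinished entry. */
		my_set_chainaddr(chan, desc->txd.phys);
		my_issue_pending_locked(chan);
	} else {
		/* Empty ring: re-arm with a no-op NULL descriptor. */
		my_start_null_desc(chan);
	}
}
```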
227 static int ioat_quiesce(struct ioatdma_chan *ioat_chan, unsigned long tmo) in ioat_quiesce() argument
233 status = ioat_chansts(ioat_chan); in ioat_quiesce()
235 ioat_suspend(ioat_chan); in ioat_quiesce()
241 status = ioat_chansts(ioat_chan); in ioat_quiesce()
248 static int ioat_reset_sync(struct ioatdma_chan *ioat_chan, unsigned long tmo) in ioat_reset_sync() argument
253 ioat_reset(ioat_chan); in ioat_reset_sync()
254 while (ioat_reset_pending(ioat_chan)) { in ioat_reset_sync()
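ioat_quiesce() and ioat_reset_sync() share a poll-with-timeout shape: trigger the state change, then busy-wait on a status register until it takes effect or tmo jiffies pass (tmo == 0 means wait indefinitely). A sketch; my_reset()/my_reset_pending() stand in for the driver's register helpers:

```c
static int my_reset_sync(struct my_chan *chan, unsigned long tmo)
{
	unsigned long end = jiffies + tmo;

	my_reset(chan);
	while (my_reset_pending(chan)) {
		if (tmo && time_after(jiffies, end))
			return -ETIMEDOUT;
		cpu_relax();	/* polite busy-wait */
	}
	return 0;
}
```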
266 __releases(&ioat_chan->prep_lock) in ioat_tx_submit_unlock()
269 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_tx_submit_unlock() local
273 dev_dbg(to_dev(ioat_chan), "%s: cookie: %d\n", __func__, cookie); in ioat_tx_submit_unlock()
275 if (!test_and_set_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state)) in ioat_tx_submit_unlock()
276 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_tx_submit_unlock()
284 ioat_chan->head += ioat_chan->produce; in ioat_tx_submit_unlock()
286 ioat_update_pending(ioat_chan); in ioat_tx_submit_unlock()
287 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_tx_submit_unlock()
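ioat_tx_submit_unlock() is the back half of a lock split: ioat_check_space_lock() returns with prep_lock held (sparse's __acquires at line 479), the prep routines fill ring slots, and submit publishes them by advancing head by chan->produce before dropping the lock (__releases at line 266). A schematic of the submit half, with assumed my_* names:

```c
/* Publishes descriptors prepared under prep_lock; the matching
 * my_check_space_lock() acquired the lock. Schematic sketch. */
static dma_cookie_t my_tx_submit_unlock(struct dma_async_tx_descriptor *tx)
	__releases(&chan->prep_lock)
{
	struct my_chan *chan = to_my_chan(tx->chan);
	dma_cookie_t cookie = dma_cookie_assign(tx);

	/* First activity on an idle channel arms the watchdog. */
	if (!test_and_set_bit(MY_ACTIVE, &chan->state))
		mod_timer(&chan->timer, jiffies + MY_COMPLETION_TIMEOUT);

	chan->head += chan->produce;	/* publish the new entries */
	my_update_pending(chan);	/* maybe ring the doorbell */
	spin_unlock_bh(&chan->prep_lock);

	return cookie;
}
```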
365 static bool reshape_ring(struct ioatdma_chan *ioat_chan, int order) in reshape_ring() argument
371 struct dma_chan *c = &ioat_chan->dma_chan; in reshape_ring()
372 const u32 curr_size = ioat_ring_size(ioat_chan); in reshape_ring()
373 const u16 active = ioat_ring_active(ioat_chan); in reshape_ring()
400 u16 curr_idx = (ioat_chan->tail+i) & (curr_size-1); in reshape_ring()
401 u16 new_idx = (ioat_chan->tail+i) & (new_size-1); in reshape_ring()
403 ring[new_idx] = ioat_chan->ring[curr_idx]; in reshape_ring()
409 u16 new_idx = (ioat_chan->tail+i) & (new_size-1); in reshape_ring()
414 u16 new_idx = (ioat_chan->tail+i) & in reshape_ring()
427 u16 new_idx = (ioat_chan->tail+i) & (new_size-1); in reshape_ring()
442 u16 curr_idx = (ioat_chan->tail+i) & (curr_size-1); in reshape_ring()
443 u16 new_idx = (ioat_chan->tail+i) & (new_size-1); in reshape_ring()
445 ring[new_idx] = ioat_chan->ring[curr_idx]; in reshape_ring()
453 ent = ioat_get_ring_ent(ioat_chan, ioat_chan->tail+i); in reshape_ring()
458 hw = ring[(ioat_chan->tail+new_size-1) & (new_size-1)]->hw; in reshape_ring()
459 next = ring[(ioat_chan->tail+new_size) & (new_size-1)]; in reshape_ring()
463 dev_dbg(to_dev(ioat_chan), "%s: allocated %d descriptors\n", in reshape_ring()
466 kfree(ioat_chan->ring); in reshape_ring()
467 ioat_chan->ring = ring; in reshape_ring()
468 ioat_chan->alloc_order = order; in reshape_ring()
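The index arithmetic reshape_ring() leans on is worth seeing in isolation: with power-of-two ring sizes, (tail + i) & (size - 1) enumerates the live entries in order, so they can be re-homed into a ring of a different power-of-two size without moving tail. A self-contained user-space demo (plain C, compiles with gcc); the copy in reshape_ring() is this loop plus descriptor (re)allocation and hardware next-pointer fixups:

```c
#include <stdio.h>

int main(void)
{
	const unsigned curr_size = 8, new_size = 16;
	unsigned tail = 6, active = 5;	/* live span wraps the old ring */
	int curr[8], ring[16], i;

	for (i = 0; i < 8; i++)
		curr[i] = 100 + i;	/* stand-ins for descriptors */

	for (i = 0; i < (int)active; i++) {
		unsigned curr_idx = (tail + i) & (curr_size - 1);
		unsigned new_idx = (tail + i) & (new_size - 1);

		ring[new_idx] = curr[curr_idx];
		printf("curr[%u] -> ring[%u] (%d)\n",
		       curr_idx, new_idx, ring[new_idx]);
	}
	return 0;
}
```

This walks old indices 6, 7, 0, 1, 2 onto new indices 6, 7, 8, 9, 10: the live span unwraps naturally into the larger ring.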
478 int ioat_check_space_lock(struct ioatdma_chan *ioat_chan, int num_descs) in ioat_check_space_lock() argument
479 __acquires(&ioat_chan->prep_lock) in ioat_check_space_lock()
484 spin_lock_bh(&ioat_chan->prep_lock); in ioat_check_space_lock()
489 if (likely(ioat_ring_space(ioat_chan) > num_descs)) { in ioat_check_space_lock()
490 dev_dbg(to_dev(ioat_chan), "%s: num_descs: %d (%x:%x:%x)\n", in ioat_check_space_lock()
491 __func__, num_descs, ioat_chan->head, in ioat_check_space_lock()
492 ioat_chan->tail, ioat_chan->issued); in ioat_check_space_lock()
493 ioat_chan->produce = num_descs; in ioat_check_space_lock()
496 retry = test_and_set_bit(IOAT_RESHAPE_PENDING, &ioat_chan->state); in ioat_check_space_lock()
497 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_check_space_lock()
503 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_check_space_lock()
504 spin_lock_bh(&ioat_chan->prep_lock); in ioat_check_space_lock()
505 retry = reshape_ring(ioat_chan, ioat_chan->alloc_order + 1); in ioat_check_space_lock()
506 clear_bit(IOAT_RESHAPE_PENDING, &ioat_chan->state); in ioat_check_space_lock()
507 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_check_space_lock()
508 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_check_space_lock()
514 dev_dbg_ratelimited(to_dev(ioat_chan), in ioat_check_space_lock()
516 __func__, num_descs, ioat_chan->head, in ioat_check_space_lock()
517 ioat_chan->tail, ioat_chan->issued); in ioat_check_space_lock()
523 if (time_is_before_jiffies(ioat_chan->timer.expires) in ioat_check_space_lock()
524 && timer_pending(&ioat_chan->timer)) { in ioat_check_space_lock()
525 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_check_space_lock()
526 ioat_timer_event((unsigned long)ioat_chan); in ioat_check_space_lock()
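ioat_check_space_lock() couples three things: a fast path that returns with prep_lock held, a single-winner ring grow guarded by the IOAT_RESHAPE_PENDING bit, and a strict cleanup_lock-before-prep_lock order for the resize (the same order the cleanup and timer paths use, which is what makes the resize deadlock-free). A schematic with assumed names:

```c
int my_check_space_lock(struct my_chan *chan, int num_descs)
{
	bool retry;

retry:
	spin_lock_bh(&chan->prep_lock);
	if (likely(my_ring_space(chan) > num_descs)) {
		chan->produce = num_descs;
		return 0;	/* prep_lock stays held for the caller */
	}
	retry = test_and_set_bit(MY_RESHAPE_PENDING, &chan->state);
	spin_unlock_bh(&chan->prep_lock);

	if (retry)		/* another CPU is already resizing */
		goto retry;

	/* cleanup_lock before prep_lock, matching the cleanup paths. */
	spin_lock_bh(&chan->cleanup_lock);
	spin_lock_bh(&chan->prep_lock);
	retry = my_reshape_ring(chan, chan->alloc_order + 1);
	clear_bit(MY_RESHAPE_PENDING, &chan->state);
	spin_unlock_bh(&chan->prep_lock);
	spin_unlock_bh(&chan->cleanup_lock);

	if (retry)		/* grow succeeded: try again */
		goto retry;

	return -ENOMEM;		/* the real code also kicks the timer
				 * here to reclaim completed slots */
}
```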
563 static u64 ioat_get_current_completion(struct ioatdma_chan *ioat_chan) in ioat_get_current_completion() argument
568 completion = *ioat_chan->completion; in ioat_get_current_completion()
571 dev_dbg(to_dev(ioat_chan), "%s: phys_complete: %#llx\n", __func__, in ioat_get_current_completion()
577 static bool ioat_cleanup_preamble(struct ioatdma_chan *ioat_chan, in ioat_cleanup_preamble() argument
580 *phys_complete = ioat_get_current_completion(ioat_chan); in ioat_cleanup_preamble()
581 if (*phys_complete == ioat_chan->last_completion) in ioat_cleanup_preamble()
584 clear_bit(IOAT_COMPLETION_ACK, &ioat_chan->state); in ioat_cleanup_preamble()
585 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_cleanup_preamble()
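The hardware reports progress by writing the physical address of the last completed descriptor (plus status bits) to a coherent completion word; ioat_get_current_completion() reads it, and ioat_cleanup_preamble() detects progress by comparing against the last value software saw. A hedged sketch of the preamble's logic:

```c
static bool my_cleanup_preamble(struct my_chan *chan, u64 *phys_complete)
{
	*phys_complete = my_current_completion(chan);
	if (*phys_complete == chan->last_completion)
		return false;	/* no forward progress since last look */

	/* Progress was made: reset the watchdog's "stuck" strike and
	 * re-arm the completion timeout. */
	clear_bit(MY_COMPLETION_ACK, &chan->state);
	mod_timer(&chan->timer, jiffies + MY_COMPLETION_TIMEOUT);
	return true;
}
```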
591 desc_get_errstat(struct ioatdma_chan *ioat_chan, struct ioat_ring_ent *desc) in desc_get_errstat() argument
624 static void __cleanup(struct ioatdma_chan *ioat_chan, dma_addr_t phys_complete) in __cleanup() argument
626 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in __cleanup()
629 int idx = ioat_chan->tail, i; in __cleanup()
632 dev_dbg(to_dev(ioat_chan), "%s: head: %#x tail: %#x issued: %#x\n", in __cleanup()
633 __func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued); in __cleanup()
645 active = ioat_ring_active(ioat_chan); in __cleanup()
650 prefetch(ioat_get_ring_ent(ioat_chan, idx + i + 1)); in __cleanup()
651 desc = ioat_get_ring_ent(ioat_chan, idx + i); in __cleanup()
652 dump_desc_dbg(ioat_chan, desc); in __cleanup()
656 desc_get_errstat(ioat_chan, desc); in __cleanup()
686 ioat_chan->tail = idx + i; in __cleanup()
689 ioat_chan->last_completion = phys_complete; in __cleanup()
692 dev_dbg(to_dev(ioat_chan), "%s: cancel completion timeout\n", in __cleanup()
694 mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in __cleanup()
699 ioat_chan->ioat_dma->reg_base + IOAT_INTRDELAY_OFFSET); in __cleanup()
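__cleanup() is the reap loop: walk from tail toward head, completing descriptors until the one whose physical address matches the completion writeback, then publish the new tail. A skeleton with assumed helper names; callback invocation and unmapping are elided:

```c
static void my_reap(struct my_chan *chan, dma_addr_t phys_complete)
{
	int idx = chan->tail, i;
	u16 active = my_ring_active(chan);
	bool seen_current = false;

	for (i = 0; i < active && !seen_current; i++) {
		struct my_ring_ent *desc;

		/* Hide the next entry's cache-miss latency. */
		prefetch(my_get_ring_ent(chan, idx + i + 1));
		desc = my_get_ring_ent(chan, idx + i);
		/* ...run the descriptor's callback, unmap buffers... */
		if (desc->txd.phys == phys_complete)
			seen_current = true;	/* hardware stopped here */
	}
	chan->tail = idx + i;
	chan->last_completion = phys_complete;

	if (active - i == 0)	/* ring drained: demote the watchdog */
		mod_timer(&chan->timer, jiffies + MY_IDLE_TIMEOUT);
}
```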
702 static void ioat_cleanup(struct ioatdma_chan *ioat_chan) in ioat_cleanup() argument
706 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_cleanup()
708 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_cleanup()
709 __cleanup(ioat_chan, phys_complete); in ioat_cleanup()
711 if (is_ioat_halted(*ioat_chan->completion)) { in ioat_cleanup()
712 u32 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_cleanup()
715 mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in ioat_cleanup()
716 ioat_eh(ioat_chan); in ioat_cleanup()
720 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_cleanup()
725 struct ioatdma_chan *ioat_chan = to_ioat_chan((void *)data); in ioat_cleanup_event() local
727 ioat_cleanup(ioat_chan); in ioat_cleanup_event()
728 if (!test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_cleanup_event()
730 writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET); in ioat_cleanup_event()
733 static void ioat_restart_channel(struct ioatdma_chan *ioat_chan) in ioat_restart_channel() argument
737 ioat_quiesce(ioat_chan, 0); in ioat_restart_channel()
738 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_restart_channel()
739 __cleanup(ioat_chan, phys_complete); in ioat_restart_channel()
741 __ioat_restart_chan(ioat_chan); in ioat_restart_channel()
744 static void ioat_eh(struct ioatdma_chan *ioat_chan) in ioat_eh() argument
746 struct pci_dev *pdev = to_pdev(ioat_chan); in ioat_eh()
756 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_eh()
757 __cleanup(ioat_chan, phys_complete); in ioat_eh()
759 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_eh()
762 dev_dbg(to_dev(ioat_chan), "%s: error = %x:%x\n", in ioat_eh()
765 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); in ioat_eh()
767 dump_desc_dbg(ioat_chan, desc); in ioat_eh()
791 dev_err(to_dev(ioat_chan), "%s: fatal error (%x:%x)\n", in ioat_eh()
806 writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_eh()
810 *ioat_chan->completion = desc->txd.phys; in ioat_eh()
812 spin_lock_bh(&ioat_chan->prep_lock); in ioat_eh()
813 ioat_restart_channel(ioat_chan); in ioat_eh()
814 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_eh()
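ioat_eh() follows a recognizable recovery shape: reap whatever did complete, read CHANERR to classify the fault (fatal errors are reported at line 791), acknowledge it by writing the bits back (CHANERR is write-1-to-clear), then fake the completion writeback so the restart path resumes after the faulting descriptor instead of re-executing it. A hedged sketch; the PCI-config-space error handling of the real function is elided:

```c
static void my_eh(struct my_chan *chan)
{
	u64 phys_complete;
	u32 chanerr;
	struct my_ring_ent *desc;

	if (my_cleanup_preamble(chan, &phys_complete))
		my_reap(chan, phys_complete);

	chanerr = readl(chan->reg_base + MY_CHANERR_OFFSET);
	desc = my_get_ring_ent(chan, chan->tail);	/* the culprit */

	/* ...mark desc failed for recognized error bits; anything
	 *    unrecoverable is reported as a fatal error... */

	/* W1C register: writing the bits back clears them. */
	writel(chanerr, chan->reg_base + MY_CHANERR_OFFSET);

	/* Pretend the failed descriptor completed, so restart skips
	 * past it rather than replaying it. */
	*chan->completion = desc->txd.phys;

	spin_lock_bh(&chan->prep_lock);
	my_restart_channel(chan);
	spin_unlock_bh(&chan->prep_lock);
}
```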
817 static void check_active(struct ioatdma_chan *ioat_chan) in check_active() argument
819 if (ioat_ring_active(ioat_chan)) { in check_active()
820 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in check_active()
824 if (test_and_clear_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state)) in check_active()
825 mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in check_active()
826 else if (ioat_chan->alloc_order > ioat_get_alloc_order()) { in check_active()
830 reshape_ring(ioat_chan, ioat_chan->alloc_order - 1); in check_active()
835 if (ioat_chan->alloc_order > ioat_get_alloc_order()) in check_active()
836 mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT); in check_active()
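check_active() encodes the idle policy: a channel with no in-flight work first loses its ACTIVE bit, then, if its ring grew past the default allocation order, is shrunk one order per idle period until it is back at the default. A sketch with assumed names:

```c
static void my_check_active(struct my_chan *chan)
{
	if (my_ring_active(chan)) {
		mod_timer(&chan->timer, jiffies + MY_COMPLETION_TIMEOUT);
		return;
	}
	if (test_and_clear_bit(MY_ACTIVE, &chan->state))
		mod_timer(&chan->timer, jiffies + MY_IDLE_TIMEOUT);
	else if (chan->alloc_order > my_default_alloc_order()) {
		/* Idle for two periods: give memory back gradually. */
		my_reshape_ring(chan, chan->alloc_order - 1);

		/* Keep the timer alive until back at the default. */
		if (chan->alloc_order > my_default_alloc_order())
			mod_timer(&chan->timer,
				  jiffies + MY_IDLE_TIMEOUT);
	}
}
```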
843 struct ioatdma_chan *ioat_chan = to_ioat_chan((void *)data); in ioat_timer_event() local
847 status = ioat_chansts(ioat_chan); in ioat_timer_event()
855 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_timer_event()
856 dev_err(to_dev(ioat_chan), "%s: Channel halted (%x)\n", in ioat_timer_event()
858 if (test_bit(IOAT_RUN, &ioat_chan->state)) in ioat_timer_event()
868 spin_lock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
869 if (ioat_cleanup_preamble(ioat_chan, &phys_complete)) in ioat_timer_event()
870 __cleanup(ioat_chan, phys_complete); in ioat_timer_event()
871 else if (test_bit(IOAT_COMPLETION_ACK, &ioat_chan->state)) { in ioat_timer_event()
872 spin_lock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
873 ioat_restart_channel(ioat_chan); in ioat_timer_event()
874 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
875 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
878 set_bit(IOAT_COMPLETION_ACK, &ioat_chan->state); in ioat_timer_event()
879 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_timer_event()
883 if (ioat_ring_active(ioat_chan)) in ioat_timer_event()
884 mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT); in ioat_timer_event()
886 spin_lock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
887 check_active(ioat_chan); in ioat_timer_event()
888 spin_unlock_bh(&ioat_chan->prep_lock); in ioat_timer_event()
890 spin_unlock_bh(&ioat_chan->cleanup_lock); in ioat_timer_event()
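ioat_timer_event() implements a two-strike watchdog: the first timeout with no completion progress sets IOAT_COMPLETION_ACK and re-arms; a second timeout with the bit still set (cleanup_preamble clears it on progress) means the channel is genuinely stuck and gets restarted. A schematic of that core, with assumed names; the halted-channel error branch at lines 855-858 is elided:

```c
static void my_timer_event(struct my_chan *chan)
{
	u64 phys_complete;

	spin_lock_bh(&chan->cleanup_lock);
	if (my_cleanup_preamble(chan, &phys_complete)) {
		my_reap(chan, phys_complete);	/* progress: all good */
	} else if (test_bit(MY_COMPLETION_ACK, &chan->state)) {
		/* Second strike: no progress across two timeouts. */
		spin_lock_bh(&chan->prep_lock);
		my_restart_channel(chan);
		spin_unlock_bh(&chan->prep_lock);
	} else {
		/* First strike: remember it and check again later. */
		set_bit(MY_COMPLETION_ACK, &chan->state);
		mod_timer(&chan->timer, jiffies + MY_COMPLETION_TIMEOUT);
	}
	spin_unlock_bh(&chan->cleanup_lock);
}
```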
897 struct ioatdma_chan *ioat_chan = to_ioat_chan(c); in ioat_tx_status() local
904 ioat_cleanup(ioat_chan); in ioat_tx_status()
921 struct ioatdma_chan *ioat_chan; in ioat_irq_reinit() local
923 ioat_chan = ioat_chan_by_index(ioat_dma, i); in ioat_irq_reinit()
924 devm_free_irq(&pdev->dev, msix->vector, ioat_chan); in ioat_irq_reinit()
943 int ioat_reset_hw(struct ioatdma_chan *ioat_chan) in ioat_reset_hw() argument
948 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma; in ioat_reset_hw()
954 ioat_quiesce(ioat_chan, msecs_to_jiffies(100)); in ioat_reset_hw()
956 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_reset_hw()
957 writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET); in ioat_reset_hw()
982 err = ioat_reset_sync(ioat_chan, msecs_to_jiffies(200)); in ioat_reset_hw()