ioat_chan          60 drivers/dma/ioat/dma.c static void ioat_eh(struct ioatdma_chan *ioat_chan);
ioat_chan          62 drivers/dma/ioat/dma.c static void ioat_print_chanerrs(struct ioatdma_chan *ioat_chan, u32 chanerr)
ioat_chan          68 drivers/dma/ioat/dma.c 			dev_err(to_dev(ioat_chan), "Err(%d): %s\n",
ioat_chan          82 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan;
ioat_chan          99 drivers/dma/ioat/dma.c 		ioat_chan = ioat_chan_by_index(instance, bit);
ioat_chan         100 drivers/dma/ioat/dma.c 		if (test_bit(IOAT_RUN, &ioat_chan->state))
ioat_chan         101 drivers/dma/ioat/dma.c 			tasklet_schedule(&ioat_chan->cleanup_task);
ioat_chan         115 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = data;
ioat_chan         117 drivers/dma/ioat/dma.c 	if (test_bit(IOAT_RUN, &ioat_chan->state))
ioat_chan         118 drivers/dma/ioat/dma.c 		tasklet_schedule(&ioat_chan->cleanup_task);
ioat_chan         123 drivers/dma/ioat/dma.c void ioat_stop(struct ioatdma_chan *ioat_chan)
ioat_chan         125 drivers/dma/ioat/dma.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         127 drivers/dma/ioat/dma.c 	int chan_id = chan_num(ioat_chan);
ioat_chan         133 drivers/dma/ioat/dma.c 	clear_bit(IOAT_RUN, &ioat_chan->state);
ioat_chan         150 drivers/dma/ioat/dma.c 	del_timer_sync(&ioat_chan->timer);
ioat_chan         153 drivers/dma/ioat/dma.c 	tasklet_kill(&ioat_chan->cleanup_task);
ioat_chan         156 drivers/dma/ioat/dma.c 	ioat_cleanup_event((unsigned long)&ioat_chan->dma_chan);
ioat_chan         159 drivers/dma/ioat/dma.c static void __ioat_issue_pending(struct ioatdma_chan *ioat_chan)
ioat_chan         161 drivers/dma/ioat/dma.c 	ioat_chan->dmacount += ioat_ring_pending(ioat_chan);
ioat_chan         162 drivers/dma/ioat/dma.c 	ioat_chan->issued = ioat_chan->head;
ioat_chan         163 drivers/dma/ioat/dma.c 	writew(ioat_chan->dmacount,
ioat_chan         164 drivers/dma/ioat/dma.c 	       ioat_chan->reg_base + IOAT_CHAN_DMACOUNT_OFFSET);
ioat_chan         165 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan),
ioat_chan         167 drivers/dma/ioat/dma.c 		__func__, ioat_chan->head, ioat_chan->tail,
ioat_chan         168 drivers/dma/ioat/dma.c 		ioat_chan->issued, ioat_chan->dmacount);
ioat_chan         173 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         175 drivers/dma/ioat/dma.c 	if (ioat_ring_pending(ioat_chan)) {
ioat_chan         176 drivers/dma/ioat/dma.c 		spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         177 drivers/dma/ioat/dma.c 		__ioat_issue_pending(ioat_chan);
ioat_chan         178 drivers/dma/ioat/dma.c 		spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         189 drivers/dma/ioat/dma.c static void ioat_update_pending(struct ioatdma_chan *ioat_chan)
ioat_chan         191 drivers/dma/ioat/dma.c 	if (ioat_ring_pending(ioat_chan) > ioat_pending_level)
ioat_chan         192 drivers/dma/ioat/dma.c 		__ioat_issue_pending(ioat_chan);
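The two fragments above implement doorbell batching: prep advances ->head, and the DMACOUNT register is only written once enough descriptors have queued. A minimal userspace model of that flow, with the MMIO write stubbed and an illustrative threshold standing in for the ioat_pending_level module parameter:

#include <stdint.h>
#include <stdio.h>

static int pending_level = 4;           /* stands in for ioat_pending_level */

struct chan { uint16_t head, issued, dmacount; unsigned order; };

static uint16_t ring_pending(const struct chan *c)
{
	return (c->head - c->issued) & ((1u << c->order) - 1);
}

static void writew_dmacount(uint16_t v) /* stub for the DMACOUNT MMIO write */
{
	printf("DMACOUNT <- %u\n", v);
}

static void issue_pending(struct chan *c)
{
	c->dmacount += ring_pending(c); /* hardware wants a running total */
	c->issued = c->head;            /* everything prepped is now visible */
	writew_dmacount(c->dmacount);
}

static void update_pending(struct chan *c)
{
	/* batch doorbell writes: flush only past the threshold */
	if (ring_pending(c) > pending_level)
		issue_pending(c);
}

int main(void)
{
	struct chan c = { .order = 4 };

	for (int i = 0; i < 6; i++) {   /* prep six descriptors */
		c.head++;
		update_pending(&c);     /* rings once pending exceeds 4 */
	}
	return 0;
}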
ioat_chan         195 drivers/dma/ioat/dma.c static void __ioat_start_null_desc(struct ioatdma_chan *ioat_chan)
ioat_chan         200 drivers/dma/ioat/dma.c 	if (ioat_ring_space(ioat_chan) < 1) {
ioat_chan         201 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan),
ioat_chan         206 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan),
ioat_chan         208 drivers/dma/ioat/dma.c 		__func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued);
ioat_chan         209 drivers/dma/ioat/dma.c 	desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head);
ioat_chan         221 drivers/dma/ioat/dma.c 	ioat_set_chainaddr(ioat_chan, desc->txd.phys);
ioat_chan         222 drivers/dma/ioat/dma.c 	dump_desc_dbg(ioat_chan, desc);
ioat_chan         225 drivers/dma/ioat/dma.c 	ioat_chan->head += 1;
ioat_chan         226 drivers/dma/ioat/dma.c 	__ioat_issue_pending(ioat_chan);
ioat_chan         229 drivers/dma/ioat/dma.c void ioat_start_null_desc(struct ioatdma_chan *ioat_chan)
ioat_chan         231 drivers/dma/ioat/dma.c 	spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         232 drivers/dma/ioat/dma.c 	if (!test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         233 drivers/dma/ioat/dma.c 		__ioat_start_null_desc(ioat_chan);
ioat_chan         234 drivers/dma/ioat/dma.c 	spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         237 drivers/dma/ioat/dma.c static void __ioat_restart_chan(struct ioatdma_chan *ioat_chan)
ioat_chan         240 drivers/dma/ioat/dma.c 	ioat_chan->issued = ioat_chan->tail;
ioat_chan         241 drivers/dma/ioat/dma.c 	ioat_chan->dmacount = 0;
ioat_chan         242 drivers/dma/ioat/dma.c 	mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
ioat_chan         244 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan),
ioat_chan         246 drivers/dma/ioat/dma.c 		__func__, ioat_chan->head, ioat_chan->tail,
ioat_chan         247 drivers/dma/ioat/dma.c 		ioat_chan->issued, ioat_chan->dmacount);
ioat_chan         249 drivers/dma/ioat/dma.c 	if (ioat_ring_pending(ioat_chan)) {
ioat_chan         252 drivers/dma/ioat/dma.c 		desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail);
ioat_chan         253 drivers/dma/ioat/dma.c 		ioat_set_chainaddr(ioat_chan, desc->txd.phys);
ioat_chan         254 drivers/dma/ioat/dma.c 		__ioat_issue_pending(ioat_chan);
ioat_chan         256 drivers/dma/ioat/dma.c 		__ioat_start_null_desc(ioat_chan);
ioat_chan         259 drivers/dma/ioat/dma.c static int ioat_quiesce(struct ioatdma_chan *ioat_chan, unsigned long tmo)
ioat_chan         265 drivers/dma/ioat/dma.c 	status = ioat_chansts(ioat_chan);
ioat_chan         267 drivers/dma/ioat/dma.c 		ioat_suspend(ioat_chan);
ioat_chan         273 drivers/dma/ioat/dma.c 		status = ioat_chansts(ioat_chan);
ioat_chan         280 drivers/dma/ioat/dma.c static int ioat_reset_sync(struct ioatdma_chan *ioat_chan, unsigned long tmo)
ioat_chan         285 drivers/dma/ioat/dma.c 	ioat_reset(ioat_chan);
ioat_chan         286 drivers/dma/ioat/dma.c 	while (ioat_reset_pending(ioat_chan)) {
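ioat_quiesce() and ioat_reset_sync() both follow the same kick-then-poll-with-deadline shape. A compilable sketch of that control flow, with fake hardware state and CLOCK_MONOTONIC standing in for jiffies (a model, not the kernel code):

#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

static int polls_until_idle = 3;        /* fake hardware: idles after 3 reads */

static bool hw_is_suspended(void)       /* stands in for ioat_chansts() checks */
{
	return --polls_until_idle <= 0;
}

static void hw_suspend(void)            /* would write IOAT_CHANCMD_SUSPEND */
{
	puts("suspend issued");
}

static long now_ms(void)
{
	struct timespec ts;

	clock_gettime(CLOCK_MONOTONIC, &ts);
	return ts.tv_sec * 1000 + ts.tv_nsec / 1000000;
}

static int quiesce(long tmo_ms)
{
	long end = now_ms() + tmo_ms;

	if (!hw_is_suspended())
		hw_suspend();           /* only suspend an active channel */

	while (!hw_is_suspended())
		if (tmo_ms && now_ms() > end)
			return -ETIMEDOUT;  /* give up, as the kernel does */
	return 0;
}

int main(void)
{
	return quiesce(100) ? 1 : 0;
}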
ioat_chan         298 drivers/dma/ioat/dma.c 	__releases(&ioat_chan->prep_lock)
ioat_chan         301 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         305 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan), "%s: cookie: %d\n", __func__, cookie);
ioat_chan         307 drivers/dma/ioat/dma.c 	if (!test_and_set_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state))
ioat_chan         308 drivers/dma/ioat/dma.c 		mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
ioat_chan         316 drivers/dma/ioat/dma.c 	ioat_chan->head += ioat_chan->produce;
ioat_chan         318 drivers/dma/ioat/dma.c 	ioat_update_pending(ioat_chan);
ioat_chan         319 drivers/dma/ioat/dma.c 	spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         329 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         338 drivers/dma/ioat/dma.c 	pos = (u8 *)ioat_chan->descs[chunk].virt + offs;
ioat_chan         339 drivers/dma/ioat/dma.c 	phys = ioat_chan->descs[chunk].hw + offs;
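The pos/phys computation above indexes into the array of 2 MiB coherent chunks. A sketch of the index-to-chunk/offset arithmetic, assuming 64-byte descriptors (IOAT_DESC_SZ); the chunk/offs derivation is reconstructed for illustration, not quoted from the source:

#include <stdint.h>

#define DESC_SZ       64u               /* assumed IOAT_DESC_SZ */
#define SZ_2M         (2u << 20)
#define DESCS_PER_2M  (SZ_2M / DESC_SZ) /* 32768 descriptors per chunk */

struct descs { uint8_t *virt; uint64_t hw; };

static void desc_addr(struct descs *chunks, unsigned idx,
		      uint8_t **pos, uint64_t *phys)
{
	unsigned chunk = idx / DESCS_PER_2M;
	unsigned offs  = (idx % DESCS_PER_2M) * DESC_SZ;

	*pos  = chunks[chunk].virt + offs;  /* CPU view */
	*phys = chunks[chunk].hw + offs;    /* device view */
}

int main(void)
{
	static uint8_t chunk0[SZ_2M];
	struct descs chunks[1] = { { chunk0, 0x100000000ULL } };
	uint8_t *pos;
	uint64_t phys;

	desc_addr(chunks, 100, &pos, &phys);
	return (pos == chunk0 + 100 * DESC_SZ &&
		phys == 0x100000000ULL + 100 * DESC_SZ) ? 0 : 1;
}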
ioat_chan         362 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         363 drivers/dma/ioat/dma.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         373 drivers/dma/ioat/dma.c 	ioat_chan->desc_chunks = chunks = (total_descs * IOAT_DESC_SZ) / SZ_2M;
ioat_chan         376 drivers/dma/ioat/dma.c 		struct ioat_descs *descs = &ioat_chan->descs[i];
ioat_chan         378 drivers/dma/ioat/dma.c 		descs->virt = dma_alloc_coherent(to_dev(ioat_chan),
ioat_chan         384 drivers/dma/ioat/dma.c 				descs = &ioat_chan->descs[idx];
ioat_chan         385 drivers/dma/ioat/dma.c 				dma_free_coherent(to_dev(ioat_chan), SZ_2M,
ioat_chan         391 drivers/dma/ioat/dma.c 			ioat_chan->desc_chunks = 0;
ioat_chan         405 drivers/dma/ioat/dma.c 			for (idx = 0; idx < ioat_chan->desc_chunks; idx++) {
ioat_chan         406 drivers/dma/ioat/dma.c 				dma_free_coherent(to_dev(ioat_chan),
ioat_chan         408 drivers/dma/ioat/dma.c 						  ioat_chan->descs[idx].virt,
ioat_chan         409 drivers/dma/ioat/dma.c 						  ioat_chan->descs[idx].hw);
ioat_chan         410 drivers/dma/ioat/dma.c 				ioat_chan->descs[idx].virt = NULL;
ioat_chan         411 drivers/dma/ioat/dma.c 				ioat_chan->descs[idx].hw = 0;
ioat_chan         414 drivers/dma/ioat/dma.c 			ioat_chan->desc_chunks = 0;
ioat_chan         437 drivers/dma/ioat/dma.c 		writew(drsctl, ioat_chan->reg_base + IOAT_CHAN_DRSCTL_OFFSET);
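The allocation fragments above carve the descriptor ring out of 2 MiB chunks and unwind the chunks already allocated if a later allocation fails. A userspace model of that pattern, with calloc() standing in for dma_alloc_coherent():

#include <stdio.h>
#include <stdlib.h>

#define DESC_SZ    64u                  /* assumed IOAT_DESC_SZ */
#define SZ_2M      (2u << 20)
#define MAX_CHUNKS 8

struct descs { void *virt; };

static int alloc_ring(struct descs *d, int total_descs)
{
	int chunks = (total_descs * DESC_SZ) / SZ_2M;  /* as in the source */
	int i, idx;

	for (i = 0; i < chunks; i++) {
		d[i].virt = calloc(1, SZ_2M);   /* dma_alloc_coherent stand-in */
		if (!d[i].virt) {
			for (idx = 0; idx < i; idx++) { /* unwind earlier chunks */
				free(d[idx].virt);
				d[idx].virt = NULL;
			}
			return -1;              /* kernel returns -ENOMEM */
		}
	}
	return chunks;
}

int main(void)
{
	struct descs d[MAX_CHUNKS] = { 0 };
	int n = alloc_ring(d, 65536);           /* 64K descriptors -> 2 chunks */

	printf("%d chunks\n", n);
	return n < 0;
}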
ioat_chan         449 drivers/dma/ioat/dma.c int ioat_check_space_lock(struct ioatdma_chan *ioat_chan, int num_descs)
ioat_chan         450 drivers/dma/ioat/dma.c 	__acquires(&ioat_chan->prep_lock)
ioat_chan         452 drivers/dma/ioat/dma.c 	spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         457 drivers/dma/ioat/dma.c 	if (likely(ioat_ring_space(ioat_chan) > num_descs)) {
ioat_chan         458 drivers/dma/ioat/dma.c 		dev_dbg(to_dev(ioat_chan), "%s: num_descs: %d (%x:%x:%x)\n",
ioat_chan         459 drivers/dma/ioat/dma.c 			__func__, num_descs, ioat_chan->head,
ioat_chan         460 drivers/dma/ioat/dma.c 			ioat_chan->tail, ioat_chan->issued);
ioat_chan         461 drivers/dma/ioat/dma.c 		ioat_chan->produce = num_descs;
ioat_chan         464 drivers/dma/ioat/dma.c 	spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         466 drivers/dma/ioat/dma.c 	dev_dbg_ratelimited(to_dev(ioat_chan),
ioat_chan         468 drivers/dma/ioat/dma.c 			    __func__, num_descs, ioat_chan->head,
ioat_chan         469 drivers/dma/ioat/dma.c 			    ioat_chan->tail, ioat_chan->issued);
ioat_chan         475 drivers/dma/ioat/dma.c 	if (time_is_before_jiffies(ioat_chan->timer.expires)
ioat_chan         476 drivers/dma/ioat/dma.c 	    && timer_pending(&ioat_chan->timer)) {
ioat_chan         477 drivers/dma/ioat/dma.c 		mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
ioat_chan         478 drivers/dma/ioat/dma.c 		ioat_timer_event(&ioat_chan->timer);
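ioat_check_space_lock() either reserves num_descs slots (recording them in ->produce for tx_submit) or fails so the caller can re-arm the flush timer and retry. A sketch of that reserve-or-fail contract with the locks elided; note the strictly-greater-than test from line 457 above:

#include <errno.h>
#include <stdint.h>
#include <stdio.h>

struct chan { uint16_t head, tail, produce; unsigned order; };

static unsigned ring_space(const struct chan *c)
{
	unsigned size = 1u << c->order;

	return size - ((c->head - c->tail) & (size - 1));
}

static int check_space(struct chan *c, int num_descs)
{
	/* strictly greater-than, matching the likely() test above */
	if (ring_space(c) > (unsigned)num_descs) {
		c->produce = num_descs;   /* consumed later by tx_submit */
		return 0;
	}
	return -ENOMEM;                   /* caller backs off and retries */
}

int main(void)
{
	struct chan c = { .head = 14, .tail = 0, .order = 4 };

	printf("%d\n", check_space(&c, 2)); /* space 2, need 2 -> -ENOMEM */
	return 0;
}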
ioat_chan         515 drivers/dma/ioat/dma.c static u64 ioat_get_current_completion(struct ioatdma_chan *ioat_chan)
ioat_chan         520 drivers/dma/ioat/dma.c 	completion = *ioat_chan->completion;
ioat_chan         523 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan), "%s: phys_complete: %#llx\n", __func__,
ioat_chan         529 drivers/dma/ioat/dma.c static bool ioat_cleanup_preamble(struct ioatdma_chan *ioat_chan,
ioat_chan         532 drivers/dma/ioat/dma.c 	*phys_complete = ioat_get_current_completion(ioat_chan);
ioat_chan         533 drivers/dma/ioat/dma.c 	if (*phys_complete == ioat_chan->last_completion)
ioat_chan         536 drivers/dma/ioat/dma.c 	clear_bit(IOAT_COMPLETION_ACK, &ioat_chan->state);
ioat_chan         537 drivers/dma/ioat/dma.c 	mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
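ioat_cleanup_preamble() decides whether cleanup has anything to do by comparing the completion address the hardware last wrote against ->last_completion. A model of that check; the low-bit status mask here is illustrative:

#include <stdbool.h>
#include <stdint.h>

#define CHANSTS_ADDR_MASK (~0x3FULL)    /* illustrative status-bit mask */

struct chan { volatile uint64_t *completion; uint64_t last_completion; };

static bool cleanup_preamble(struct chan *c, uint64_t *phys_complete)
{
	/* hardware writes "last completed descriptor" plus status bits */
	*phys_complete = *c->completion & CHANSTS_ADDR_MASK;
	if (*phys_complete == c->last_completion)
		return false;           /* nothing new completed */
	/* kernel also clears IOAT_COMPLETION_ACK and re-arms the timer here */
	return true;
}

int main(void)
{
	uint64_t slot = 0x1000 | 0x1;   /* address plus a status bit */
	struct chan c = { &slot, 0 };
	uint64_t phys;

	return (cleanup_preamble(&c, &phys) && phys == 0x1000) ? 0 : 1;
}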
ioat_chan         543 drivers/dma/ioat/dma.c desc_get_errstat(struct ioatdma_chan *ioat_chan, struct ioat_ring_ent *desc)
ioat_chan         576 drivers/dma/ioat/dma.c static void __cleanup(struct ioatdma_chan *ioat_chan, dma_addr_t phys_complete)
ioat_chan         578 drivers/dma/ioat/dma.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         581 drivers/dma/ioat/dma.c 	int idx = ioat_chan->tail, i;
ioat_chan         584 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan), "%s: head: %#x tail: %#x issued: %#x\n",
ioat_chan         585 drivers/dma/ioat/dma.c 		__func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued);
ioat_chan         597 drivers/dma/ioat/dma.c 	active = ioat_ring_active(ioat_chan);
ioat_chan         601 drivers/dma/ioat/dma.c 		prefetch(ioat_get_ring_ent(ioat_chan, idx + i + 1));
ioat_chan         602 drivers/dma/ioat/dma.c 		desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         603 drivers/dma/ioat/dma.c 		dump_desc_dbg(ioat_chan, desc);
ioat_chan         607 drivers/dma/ioat/dma.c 			desc_get_errstat(ioat_chan, desc);
ioat_chan         636 drivers/dma/ioat/dma.c 	ioat_chan->tail = idx + i;
ioat_chan         639 drivers/dma/ioat/dma.c 	ioat_chan->last_completion = phys_complete;
ioat_chan         642 drivers/dma/ioat/dma.c 		dev_dbg(to_dev(ioat_chan), "%s: cancel completion timeout\n",
ioat_chan         644 drivers/dma/ioat/dma.c 		mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT);
ioat_chan         648 drivers/dma/ioat/dma.c 	if (ioat_chan->intr_coalesce != ioat_chan->prev_intr_coalesce) {
ioat_chan         649 drivers/dma/ioat/dma.c 		writew(min((ioat_chan->intr_coalesce * (active - i)),
ioat_chan         651 drivers/dma/ioat/dma.c 		       ioat_chan->ioat_dma->reg_base + IOAT_INTRDELAY_OFFSET);
ioat_chan         652 drivers/dma/ioat/dma.c 		ioat_chan->prev_intr_coalesce = ioat_chan->intr_coalesce;
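__cleanup() walks from ->tail toward ->head, running completions until it reaches the descriptor whose address the hardware reported, then retires the walked entries by advancing ->tail. A self-contained model of that walk, with dma cookies and unmap work reduced to a callback:

#include <stdint.h>
#include <stdio.h>

#define RING_ORDER 4
#define RING_SIZE  (1u << RING_ORDER)

struct ent { uint64_t phys; void (*callback)(void); };

struct chan {
	struct ent ring[RING_SIZE];
	uint16_t head, tail;
	uint64_t last_completion;
};

static void cleanup(struct chan *c, uint64_t phys_complete)
{
	uint16_t active = (c->head - c->tail) & (RING_SIZE - 1);
	int i;

	for (i = 0; i < active; i++) {
		struct ent *d = &c->ring[(c->tail + i) & (RING_SIZE - 1)];

		if (d->callback)
			d->callback();  /* client completion */
		if (d->phys == phys_complete) {
			i++;            /* include the final descriptor */
			break;
		}
	}
	c->tail += i;                   /* retire what completed */
	c->last_completion = phys_complete;
}

static void done(void) { puts("desc complete"); }

int main(void)
{
	struct chan c = { .head = 3, .tail = 0 };

	c.ring[0] = (struct ent){ 0x40, done };
	c.ring[1] = (struct ent){ 0x80, done };
	c.ring[2] = (struct ent){ 0xc0, done };
	cleanup(&c, 0x80);              /* hw reports the second descriptor */
	return c.tail == 2 ? 0 : 1;
}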
ioat_chan         656 drivers/dma/ioat/dma.c static void ioat_cleanup(struct ioatdma_chan *ioat_chan)
ioat_chan         660 drivers/dma/ioat/dma.c 	spin_lock_bh(&ioat_chan->cleanup_lock);
ioat_chan         662 drivers/dma/ioat/dma.c 	if (ioat_cleanup_preamble(ioat_chan, &phys_complete))
ioat_chan         663 drivers/dma/ioat/dma.c 		__cleanup(ioat_chan, phys_complete);
ioat_chan         665 drivers/dma/ioat/dma.c 	if (is_ioat_halted(*ioat_chan->completion)) {
ioat_chan         666 drivers/dma/ioat/dma.c 		u32 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         670 drivers/dma/ioat/dma.c 			mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT);
ioat_chan         671 drivers/dma/ioat/dma.c 			ioat_eh(ioat_chan);
ioat_chan         675 drivers/dma/ioat/dma.c 	spin_unlock_bh(&ioat_chan->cleanup_lock);
ioat_chan         680 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan((void *)data);
ioat_chan         682 drivers/dma/ioat/dma.c 	ioat_cleanup(ioat_chan);
ioat_chan         683 drivers/dma/ioat/dma.c 	if (!test_bit(IOAT_RUN, &ioat_chan->state))
ioat_chan         685 drivers/dma/ioat/dma.c 	writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET);
ioat_chan         688 drivers/dma/ioat/dma.c static void ioat_restart_channel(struct ioatdma_chan *ioat_chan)
ioat_chan         693 drivers/dma/ioat/dma.c 	writel(lower_32_bits(ioat_chan->completion_dma),
ioat_chan         694 drivers/dma/ioat/dma.c 	       ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_LOW);
ioat_chan         695 drivers/dma/ioat/dma.c 	writel(upper_32_bits(ioat_chan->completion_dma),
ioat_chan         696 drivers/dma/ioat/dma.c 	       ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH);
ioat_chan         698 drivers/dma/ioat/dma.c 	ioat_quiesce(ioat_chan, 0);
ioat_chan         699 drivers/dma/ioat/dma.c 	if (ioat_cleanup_preamble(ioat_chan, &phys_complete))
ioat_chan         700 drivers/dma/ioat/dma.c 		__cleanup(ioat_chan, phys_complete);
ioat_chan         702 drivers/dma/ioat/dma.c 	__ioat_restart_chan(ioat_chan);
ioat_chan         706 drivers/dma/ioat/dma.c static void ioat_abort_descs(struct ioatdma_chan *ioat_chan)
ioat_chan         708 drivers/dma/ioat/dma.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         711 drivers/dma/ioat/dma.c 	int idx = ioat_chan->tail, i;
ioat_chan         718 drivers/dma/ioat/dma.c 	active = ioat_ring_active(ioat_chan);
ioat_chan         724 drivers/dma/ioat/dma.c 		prefetch(ioat_get_ring_ent(ioat_chan, idx + i + 1));
ioat_chan         725 drivers/dma/ioat/dma.c 		desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         753 drivers/dma/ioat/dma.c 	ioat_chan->tail = idx + active;
ioat_chan         755 drivers/dma/ioat/dma.c 	desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail);
ioat_chan         756 drivers/dma/ioat/dma.c 	ioat_chan->last_completion = *ioat_chan->completion = desc->txd.phys;
ioat_chan         759 drivers/dma/ioat/dma.c static void ioat_eh(struct ioatdma_chan *ioat_chan)
ioat_chan         761 drivers/dma/ioat/dma.c 	struct pci_dev *pdev = to_pdev(ioat_chan);
ioat_chan         773 drivers/dma/ioat/dma.c 	if (ioat_cleanup_preamble(ioat_chan, &phys_complete))
ioat_chan         774 drivers/dma/ioat/dma.c 		__cleanup(ioat_chan, phys_complete);
ioat_chan         776 drivers/dma/ioat/dma.c 	chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         779 drivers/dma/ioat/dma.c 	dev_dbg(to_dev(ioat_chan), "%s: error = %x:%x\n",
ioat_chan         782 drivers/dma/ioat/dma.c 	desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail);
ioat_chan         784 drivers/dma/ioat/dma.c 	dump_desc_dbg(ioat_chan, desc);
ioat_chan         821 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "%s: fatal error (%x:%x)\n",
ioat_chan         823 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "Errors handled:\n");
ioat_chan         824 drivers/dma/ioat/dma.c 		ioat_print_chanerrs(ioat_chan, err_handled);
ioat_chan         825 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "Errors not handled:\n");
ioat_chan         826 drivers/dma/ioat/dma.c 		ioat_print_chanerrs(ioat_chan, (chanerr & ~err_handled));
ioat_chan         842 drivers/dma/ioat/dma.c 	*ioat_chan->completion = desc->txd.phys;
ioat_chan         844 drivers/dma/ioat/dma.c 	spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         847 drivers/dma/ioat/dma.c 		ioat_abort_descs(ioat_chan);
ioat_chan         849 drivers/dma/ioat/dma.c 		ioat_reset_hw(ioat_chan);
ioat_chan         852 drivers/dma/ioat/dma.c 	writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         855 drivers/dma/ioat/dma.c 	ioat_restart_channel(ioat_chan);
ioat_chan         856 drivers/dma/ioat/dma.c 	spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         859 drivers/dma/ioat/dma.c static void check_active(struct ioatdma_chan *ioat_chan)
ioat_chan         861 drivers/dma/ioat/dma.c 	if (ioat_ring_active(ioat_chan)) {
ioat_chan         862 drivers/dma/ioat/dma.c 		mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
ioat_chan         866 drivers/dma/ioat/dma.c 	if (test_and_clear_bit(IOAT_CHAN_ACTIVE, &ioat_chan->state))
ioat_chan         867 drivers/dma/ioat/dma.c 		mod_timer(&ioat_chan->timer, jiffies + IDLE_TIMEOUT);
ioat_chan         872 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = from_timer(ioat_chan, t, timer);
ioat_chan         876 drivers/dma/ioat/dma.c 	status = ioat_chansts(ioat_chan);
ioat_chan         884 drivers/dma/ioat/dma.c 		chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         885 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "%s: Channel halted (%x)\n",
ioat_chan         887 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "Errors:\n");
ioat_chan         888 drivers/dma/ioat/dma.c 		ioat_print_chanerrs(ioat_chan, chanerr);
ioat_chan         890 drivers/dma/ioat/dma.c 		if (test_bit(IOAT_RUN, &ioat_chan->state)) {
ioat_chan         891 drivers/dma/ioat/dma.c 			spin_lock_bh(&ioat_chan->cleanup_lock);
ioat_chan         892 drivers/dma/ioat/dma.c 			spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         893 drivers/dma/ioat/dma.c 			set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
ioat_chan         894 drivers/dma/ioat/dma.c 			spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         896 drivers/dma/ioat/dma.c 			ioat_abort_descs(ioat_chan);
ioat_chan         897 drivers/dma/ioat/dma.c 			dev_warn(to_dev(ioat_chan), "Reset channel...\n");
ioat_chan         898 drivers/dma/ioat/dma.c 			ioat_reset_hw(ioat_chan);
ioat_chan         899 drivers/dma/ioat/dma.c 			dev_warn(to_dev(ioat_chan), "Restart channel...\n");
ioat_chan         900 drivers/dma/ioat/dma.c 			ioat_restart_channel(ioat_chan);
ioat_chan         902 drivers/dma/ioat/dma.c 			spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         903 drivers/dma/ioat/dma.c 			clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
ioat_chan         904 drivers/dma/ioat/dma.c 			spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         905 drivers/dma/ioat/dma.c 			spin_unlock_bh(&ioat_chan->cleanup_lock);
ioat_chan         911 drivers/dma/ioat/dma.c 	spin_lock_bh(&ioat_chan->cleanup_lock);
ioat_chan         914 drivers/dma/ioat/dma.c 	if (!ioat_ring_active(ioat_chan)) {
ioat_chan         915 drivers/dma/ioat/dma.c 		spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         916 drivers/dma/ioat/dma.c 		check_active(ioat_chan);
ioat_chan         917 drivers/dma/ioat/dma.c 		spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         918 drivers/dma/ioat/dma.c 		spin_unlock_bh(&ioat_chan->cleanup_lock);
ioat_chan         926 drivers/dma/ioat/dma.c 	if (ioat_cleanup_preamble(ioat_chan, &phys_complete))
ioat_chan         927 drivers/dma/ioat/dma.c 		__cleanup(ioat_chan, phys_complete);
ioat_chan         928 drivers/dma/ioat/dma.c 	else if (test_bit(IOAT_COMPLETION_ACK, &ioat_chan->state)) {
ioat_chan         931 drivers/dma/ioat/dma.c 		chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         932 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "CHANSTS: %#Lx CHANERR: %#x\n",
ioat_chan         934 drivers/dma/ioat/dma.c 		dev_err(to_dev(ioat_chan), "Errors:\n");
ioat_chan         935 drivers/dma/ioat/dma.c 		ioat_print_chanerrs(ioat_chan, chanerr);
ioat_chan         937 drivers/dma/ioat/dma.c 		dev_dbg(to_dev(ioat_chan), "Active descriptors: %d\n",
ioat_chan         938 drivers/dma/ioat/dma.c 			ioat_ring_active(ioat_chan));
ioat_chan         940 drivers/dma/ioat/dma.c 		spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         941 drivers/dma/ioat/dma.c 		set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
ioat_chan         942 drivers/dma/ioat/dma.c 		spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         944 drivers/dma/ioat/dma.c 		ioat_abort_descs(ioat_chan);
ioat_chan         945 drivers/dma/ioat/dma.c 		dev_warn(to_dev(ioat_chan), "Resetting channel...\n");
ioat_chan         946 drivers/dma/ioat/dma.c 		ioat_reset_hw(ioat_chan);
ioat_chan         947 drivers/dma/ioat/dma.c 		dev_warn(to_dev(ioat_chan), "Restarting channel...\n");
ioat_chan         948 drivers/dma/ioat/dma.c 		ioat_restart_channel(ioat_chan);
ioat_chan         950 drivers/dma/ioat/dma.c 		spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         951 drivers/dma/ioat/dma.c 		clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
ioat_chan         952 drivers/dma/ioat/dma.c 		spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         953 drivers/dma/ioat/dma.c 		spin_unlock_bh(&ioat_chan->cleanup_lock);
ioat_chan         956 drivers/dma/ioat/dma.c 		set_bit(IOAT_COMPLETION_ACK, &ioat_chan->state);
ioat_chan         958 drivers/dma/ioat/dma.c 	mod_timer(&ioat_chan->timer, jiffies + COMPLETION_TIMEOUT);
ioat_chan         959 drivers/dma/ioat/dma.c 	spin_unlock_bh(&ioat_chan->cleanup_lock);
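ioat_timer_event() amounts to a two-strike watchdog: the first timer tick with no completion progress sets IOAT_COMPLETION_ACK, and a second tick with the bit still set triggers the abort/reset/restart path. A distilled model of just that decision:

#include <stdbool.h>
#include <stdint.h>

struct chan { bool ack; uint64_t last, now_completion; };

/* Returns true when the channel looks hung: two timer ticks with no
 * forward progress, mirroring the IOAT_COMPLETION_ACK two-strike logic. */
static bool watchdog_tick(struct chan *c)
{
	if (c->now_completion != c->last) { /* progress: reset the strike */
		c->last = c->now_completion;
		c->ack = false;
		return false;
	}
	if (c->ack)
		return true;            /* second tick with no progress */
	c->ack = true;                  /* first strike */
	return false;
}

int main(void)
{
	struct chan c = { 0 };

	c.now_completion = 5;
	watchdog_tick(&c);              /* progress recorded */
	watchdog_tick(&c);              /* first strike */
	return watchdog_tick(&c) ? 0 : 1; /* second strike -> hung */
}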
ioat_chan         966 drivers/dma/ioat/dma.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         973 drivers/dma/ioat/dma.c 	ioat_cleanup(ioat_chan);
ioat_chan         978 drivers/dma/ioat/dma.c int ioat_reset_hw(struct ioatdma_chan *ioat_chan)
ioat_chan         983 drivers/dma/ioat/dma.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         989 drivers/dma/ioat/dma.c 	ioat_quiesce(ioat_chan, msecs_to_jiffies(100));
ioat_chan         991 drivers/dma/ioat/dma.c 	chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         992 drivers/dma/ioat/dma.c 	writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan        1024 drivers/dma/ioat/dma.c 	err = ioat_reset_sync(ioat_chan, msecs_to_jiffies(200));
ioat_chan          23 drivers/dma/ioat/dma.h #define to_dev(ioat_chan) (&(ioat_chan)->ioat_dma->pdev->dev)
ioat_chan          24 drivers/dma/ioat/dma.h #define to_pdev(ioat_chan) ((ioat_chan)->ioat_dma->pdev)
ioat_chan         217 drivers/dma/ioat/dma.h __dump_desc_dbg(struct ioatdma_chan *ioat_chan, struct ioat_dma_descriptor *hw,
ioat_chan         220 drivers/dma/ioat/dma.h 	struct device *dev = to_dev(ioat_chan);
ioat_chan         238 drivers/dma/ioat/dma.h static inline u64 ioat_chansts(struct ioatdma_chan *ioat_chan)
ioat_chan         240 drivers/dma/ioat/dma.h 	return readq(ioat_chan->reg_base + IOAT_CHANSTS_OFFSET);
ioat_chan         248 drivers/dma/ioat/dma.h static inline u32 ioat_chanerr(struct ioatdma_chan *ioat_chan)
ioat_chan         250 drivers/dma/ioat/dma.h 	return readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         253 drivers/dma/ioat/dma.h static inline void ioat_suspend(struct ioatdma_chan *ioat_chan)
ioat_chan         255 drivers/dma/ioat/dma.h 	u8 ver = ioat_chan->ioat_dma->version;
ioat_chan         258 drivers/dma/ioat/dma.h 	       ioat_chan->reg_base + IOAT_CHANCMD_OFFSET(ver));
ioat_chan         261 drivers/dma/ioat/dma.h static inline void ioat_reset(struct ioatdma_chan *ioat_chan)
ioat_chan         263 drivers/dma/ioat/dma.h 	u8 ver = ioat_chan->ioat_dma->version;
ioat_chan         266 drivers/dma/ioat/dma.h 	       ioat_chan->reg_base + IOAT_CHANCMD_OFFSET(ver));
ioat_chan         269 drivers/dma/ioat/dma.h static inline bool ioat_reset_pending(struct ioatdma_chan *ioat_chan)
ioat_chan         271 drivers/dma/ioat/dma.h 	u8 ver = ioat_chan->ioat_dma->version;
ioat_chan         274 drivers/dma/ioat/dma.h 	cmd = readb(ioat_chan->reg_base + IOAT_CHANCMD_OFFSET(ver));
ioat_chan         308 drivers/dma/ioat/dma.h static inline u32 ioat_ring_size(struct ioatdma_chan *ioat_chan)
ioat_chan         310 drivers/dma/ioat/dma.h 	return 1 << ioat_chan->alloc_order;
ioat_chan         314 drivers/dma/ioat/dma.h static inline u16 ioat_ring_active(struct ioatdma_chan *ioat_chan)
ioat_chan         316 drivers/dma/ioat/dma.h 	return CIRC_CNT(ioat_chan->head, ioat_chan->tail,
ioat_chan         317 drivers/dma/ioat/dma.h 			ioat_ring_size(ioat_chan));
ioat_chan         321 drivers/dma/ioat/dma.h static inline u16 ioat_ring_pending(struct ioatdma_chan *ioat_chan)
ioat_chan         323 drivers/dma/ioat/dma.h 	return CIRC_CNT(ioat_chan->head, ioat_chan->issued,
ioat_chan         324 drivers/dma/ioat/dma.h 			ioat_ring_size(ioat_chan));
ioat_chan         327 drivers/dma/ioat/dma.h static inline u32 ioat_ring_space(struct ioatdma_chan *ioat_chan)
ioat_chan         329 drivers/dma/ioat/dma.h 	return ioat_ring_size(ioat_chan) - ioat_ring_active(ioat_chan);
ioat_chan         333 drivers/dma/ioat/dma.h ioat_xferlen_to_descs(struct ioatdma_chan *ioat_chan, size_t len)
ioat_chan         335 drivers/dma/ioat/dma.h 	u16 num_descs = len >> ioat_chan->xfercap_log;
ioat_chan         337 drivers/dma/ioat/dma.h 	num_descs += !!(len & ((1 << ioat_chan->xfercap_log) - 1));
ioat_chan         342 drivers/dma/ioat/dma.h ioat_get_ring_ent(struct ioatdma_chan *ioat_chan, u16 idx)
ioat_chan         344 drivers/dma/ioat/dma.h 	return ioat_chan->ring[idx & (ioat_ring_size(ioat_chan) - 1)];
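The inline helpers above are all arithmetic on a power-of-two ring: CIRC_CNT(head, x, size) reduces to (head - x) & (size - 1), and indices grow monotonically, masked only on access. A compilable model of the same accounting with assertions (names mirror the kernel's, but this is a userspace sketch):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct chan {
	uint16_t head;       /* next free slot (prep side) */
	uint16_t issued;     /* last index flushed to hardware */
	uint16_t tail;       /* oldest not-yet-completed descriptor */
	uint16_t alloc_order;
	uint8_t xfercap_log; /* log2 of max bytes per descriptor */
};

static uint32_t ring_size(const struct chan *c) { return 1u << c->alloc_order; }

static uint16_t ring_active(const struct chan *c)
{
	return (c->head - c->tail) & (ring_size(c) - 1);    /* CIRC_CNT */
}

static uint16_t ring_pending(const struct chan *c)
{
	return (c->head - c->issued) & (ring_size(c) - 1);
}

static uint32_t ring_space(const struct chan *c)
{
	return ring_size(c) - ring_active(c);
}

static uint16_t xferlen_to_descs(const struct chan *c, size_t len)
{
	uint16_t n = len >> c->xfercap_log;

	n += !!(len & ((1u << c->xfercap_log) - 1));  /* partial tail chunk */
	return n;
}

int main(void)
{
	struct chan c = { .head = 5, .issued = 3, .tail = 1,
			  .alloc_order = 4, .xfercap_log = 20 }; /* 16 slots, 1 MiB cap */

	assert(ring_active(&c) == 4);
	assert(ring_pending(&c) == 2);
	assert(ring_space(&c) == 12);
	assert(xferlen_to_descs(&c, (3u << 20) + 1) == 4);  /* 3 MiB + 1 byte */
	printf("ring model ok\n");
	return 0;
}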
ioat_chan         348 drivers/dma/ioat/dma.h ioat_set_chainaddr(struct ioatdma_chan *ioat_chan, u64 addr)
ioat_chan         351 drivers/dma/ioat/dma.h 	       ioat_chan->reg_base + IOAT2_CHAINADDR_OFFSET_LOW);
ioat_chan         353 drivers/dma/ioat/dma.h 	       ioat_chan->reg_base + IOAT2_CHAINADDR_OFFSET_HIGH);
ioat_chan         390 drivers/dma/ioat/dma.h void ioat_start_null_desc(struct ioatdma_chan *ioat_chan);
ioat_chan         392 drivers/dma/ioat/dma.h int ioat_reset_hw(struct ioatdma_chan *ioat_chan);
ioat_chan         398 drivers/dma/ioat/dma.h int ioat_check_space_lock(struct ioatdma_chan *ioat_chan, int num_descs);
ioat_chan         407 drivers/dma/ioat/dma.h void ioat_stop(struct ioatdma_chan *ioat_chan);
ioat_chan         121 drivers/dma/ioat/init.c 		  struct ioatdma_chan *ioat_chan, int idx);
ioat_chan         405 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan;
ioat_chan         434 drivers/dma/ioat/init.c 		ioat_chan = ioat_chan_by_index(ioat_dma, i);
ioat_chan         437 drivers/dma/ioat/init.c 				       "ioat-msix", ioat_chan);
ioat_chan         441 drivers/dma/ioat/init.c 				ioat_chan = ioat_chan_by_index(ioat_dma, j);
ioat_chan         442 drivers/dma/ioat/init.c 				devm_free_irq(dev, msix->vector, ioat_chan);
ioat_chan         571 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan;
ioat_chan         592 drivers/dma/ioat/init.c 		ioat_chan = devm_kzalloc(dev, sizeof(*ioat_chan), GFP_KERNEL);
ioat_chan         593 drivers/dma/ioat/init.c 		if (!ioat_chan)
ioat_chan         596 drivers/dma/ioat/init.c 		ioat_init_channel(ioat_dma, ioat_chan, i);
ioat_chan         597 drivers/dma/ioat/init.c 		ioat_chan->xfercap_log = xfercap_log;
ioat_chan         598 drivers/dma/ioat/init.c 		spin_lock_init(&ioat_chan->prep_lock);
ioat_chan         599 drivers/dma/ioat/init.c 		if (ioat_reset_hw(ioat_chan)) {
ioat_chan         613 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         614 drivers/dma/ioat/init.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         616 drivers/dma/ioat/init.c 	const int total_descs = 1 << ioat_chan->alloc_order;
ioat_chan         623 drivers/dma/ioat/init.c 	if (!ioat_chan->ring)
ioat_chan         626 drivers/dma/ioat/init.c 	ioat_stop(ioat_chan);
ioat_chan         627 drivers/dma/ioat/init.c 	ioat_reset_hw(ioat_chan);
ioat_chan         632 drivers/dma/ioat/init.c 			ioat_chan->reg_base + IOAT_CHAN_LTR_SWSEL_OFFSET);
ioat_chan         634 drivers/dma/ioat/init.c 	spin_lock_bh(&ioat_chan->cleanup_lock);
ioat_chan         635 drivers/dma/ioat/init.c 	spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         636 drivers/dma/ioat/init.c 	descs = ioat_ring_space(ioat_chan);
ioat_chan         637 drivers/dma/ioat/init.c 	dev_dbg(to_dev(ioat_chan), "freeing %d idle descriptors\n", descs);
ioat_chan         639 drivers/dma/ioat/init.c 		desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head + i);
ioat_chan         644 drivers/dma/ioat/init.c 		dev_err(to_dev(ioat_chan), "Freeing %d in use descriptors!\n",
ioat_chan         648 drivers/dma/ioat/init.c 		desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail + i);
ioat_chan         649 drivers/dma/ioat/init.c 		dump_desc_dbg(ioat_chan, desc);
ioat_chan         653 drivers/dma/ioat/init.c 	for (i = 0; i < ioat_chan->desc_chunks; i++) {
ioat_chan         654 drivers/dma/ioat/init.c 		dma_free_coherent(to_dev(ioat_chan), SZ_2M,
ioat_chan         655 drivers/dma/ioat/init.c 				  ioat_chan->descs[i].virt,
ioat_chan         656 drivers/dma/ioat/init.c 				  ioat_chan->descs[i].hw);
ioat_chan         657 drivers/dma/ioat/init.c 		ioat_chan->descs[i].virt = NULL;
ioat_chan         658 drivers/dma/ioat/init.c 		ioat_chan->descs[i].hw = 0;
ioat_chan         660 drivers/dma/ioat/init.c 	ioat_chan->desc_chunks = 0;
ioat_chan         662 drivers/dma/ioat/init.c 	kfree(ioat_chan->ring);
ioat_chan         663 drivers/dma/ioat/init.c 	ioat_chan->ring = NULL;
ioat_chan         664 drivers/dma/ioat/init.c 	ioat_chan->alloc_order = 0;
ioat_chan         665 drivers/dma/ioat/init.c 	dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion,
ioat_chan         666 drivers/dma/ioat/init.c 		      ioat_chan->completion_dma);
ioat_chan         667 drivers/dma/ioat/init.c 	spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         668 drivers/dma/ioat/init.c 	spin_unlock_bh(&ioat_chan->cleanup_lock);
ioat_chan         670 drivers/dma/ioat/init.c 	ioat_chan->last_completion = 0;
ioat_chan         671 drivers/dma/ioat/init.c 	ioat_chan->completion_dma = 0;
ioat_chan         672 drivers/dma/ioat/init.c 	ioat_chan->dmacount = 0;
ioat_chan         680 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         688 drivers/dma/ioat/init.c 	if (ioat_chan->ring)
ioat_chan         689 drivers/dma/ioat/init.c 		return 1 << ioat_chan->alloc_order;
ioat_chan         692 drivers/dma/ioat/init.c 	writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET);
ioat_chan         696 drivers/dma/ioat/init.c 	ioat_chan->completion =
ioat_chan         697 drivers/dma/ioat/init.c 		dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
ioat_chan         698 drivers/dma/ioat/init.c 				GFP_NOWAIT, &ioat_chan->completion_dma);
ioat_chan         699 drivers/dma/ioat/init.c 	if (!ioat_chan->completion)
ioat_chan         702 drivers/dma/ioat/init.c 	writel(((u64)ioat_chan->completion_dma) & 0x00000000FFFFFFFF,
ioat_chan         703 drivers/dma/ioat/init.c 	       ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_LOW);
ioat_chan         704 drivers/dma/ioat/init.c 	writel(((u64)ioat_chan->completion_dma) >> 32,
ioat_chan         705 drivers/dma/ioat/init.c 	       ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH);
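The paired writes above program a 64-bit completion DMA address through two 32-bit registers. A trivial sketch of the split, with the MMIO writes stubbed:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

static void writel(uint32_t v, const char *reg)   /* MMIO stub */
{
	printf("%s <- 0x%08" PRIx32 "\n", reg, v);
}

static void set_completion_addr(uint64_t dma)
{
	writel((uint32_t)(dma & 0xFFFFFFFFu), "CHANCMP_LOW");
	writel((uint32_t)(dma >> 32),         "CHANCMP_HIGH");
}

int main(void)
{
	set_completion_addr(0x12345678ABCDEF00ULL);
	return 0;
}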
ioat_chan         712 drivers/dma/ioat/init.c 	spin_lock_bh(&ioat_chan->cleanup_lock);
ioat_chan         713 drivers/dma/ioat/init.c 	spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan         714 drivers/dma/ioat/init.c 	ioat_chan->ring = ring;
ioat_chan         715 drivers/dma/ioat/init.c 	ioat_chan->head = 0;
ioat_chan         716 drivers/dma/ioat/init.c 	ioat_chan->issued = 0;
ioat_chan         717 drivers/dma/ioat/init.c 	ioat_chan->tail = 0;
ioat_chan         718 drivers/dma/ioat/init.c 	ioat_chan->alloc_order = order;
ioat_chan         719 drivers/dma/ioat/init.c 	set_bit(IOAT_RUN, &ioat_chan->state);
ioat_chan         720 drivers/dma/ioat/init.c 	spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan         721 drivers/dma/ioat/init.c 	spin_unlock_bh(&ioat_chan->cleanup_lock);
ioat_chan         724 drivers/dma/ioat/init.c 	if (ioat_chan->ioat_dma->version >= IOAT_VER_3_4) {
ioat_chan         730 drivers/dma/ioat/init.c 		writel(lat_val, ioat_chan->reg_base +
ioat_chan         736 drivers/dma/ioat/init.c 		writel(lat_val, ioat_chan->reg_base +
ioat_chan         741 drivers/dma/ioat/init.c 		       ioat_chan->reg_base +
ioat_chan         745 drivers/dma/ioat/init.c 	ioat_start_null_desc(ioat_chan);
ioat_chan         750 drivers/dma/ioat/init.c 		status = ioat_chansts(ioat_chan);
ioat_chan         754 drivers/dma/ioat/init.c 		return 1 << ioat_chan->alloc_order;
ioat_chan         756 drivers/dma/ioat/init.c 	chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         758 drivers/dma/ioat/init.c 	dev_WARN(to_dev(ioat_chan),
ioat_chan         767 drivers/dma/ioat/init.c 		  struct ioatdma_chan *ioat_chan, int idx)
ioat_chan         770 drivers/dma/ioat/init.c 	struct dma_chan *c = &ioat_chan->dma_chan;
ioat_chan         773 drivers/dma/ioat/init.c 	ioat_chan->ioat_dma = ioat_dma;
ioat_chan         774 drivers/dma/ioat/init.c 	ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
ioat_chan         775 drivers/dma/ioat/init.c 	spin_lock_init(&ioat_chan->cleanup_lock);
ioat_chan         776 drivers/dma/ioat/init.c 	ioat_chan->dma_chan.device = dma;
ioat_chan         777 drivers/dma/ioat/init.c 	dma_cookie_init(&ioat_chan->dma_chan);
ioat_chan         778 drivers/dma/ioat/init.c 	list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels);
ioat_chan         779 drivers/dma/ioat/init.c 	ioat_dma->idx[idx] = ioat_chan;
ioat_chan         780 drivers/dma/ioat/init.c 	timer_setup(&ioat_chan->timer, ioat_timer_event, 0);
ioat_chan         781 drivers/dma/ioat/init.c 	tasklet_init(&ioat_chan->cleanup_task, ioat_cleanup_event, data);
ioat_chan        1067 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan;
ioat_chan        1078 drivers/dma/ioat/init.c 			ioat_chan = to_ioat_chan(c);
ioat_chan        1079 drivers/dma/ioat/init.c 			errmask = readl(ioat_chan->reg_base +
ioat_chan        1083 drivers/dma/ioat/init.c 			writel(errmask, ioat_chan->reg_base +
ioat_chan        1095 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan;
ioat_chan        1181 drivers/dma/ioat/init.c 		ioat_chan = to_ioat_chan(c);
ioat_chan        1183 drivers/dma/ioat/init.c 		       ioat_chan->reg_base + IOAT_DCACTRL_OFFSET);
ioat_chan        1216 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan;
ioat_chan        1223 drivers/dma/ioat/init.c 		ioat_chan = ioat_dma->idx[i];
ioat_chan        1224 drivers/dma/ioat/init.c 		if (!ioat_chan)
ioat_chan        1227 drivers/dma/ioat/init.c 		spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan        1228 drivers/dma/ioat/init.c 		set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
ioat_chan        1229 drivers/dma/ioat/init.c 		spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan        1236 drivers/dma/ioat/init.c 		del_timer_sync(&ioat_chan->timer);
ioat_chan        1239 drivers/dma/ioat/init.c 		ioat_reset_hw(ioat_chan);
ioat_chan        1247 drivers/dma/ioat/init.c 	struct ioatdma_chan *ioat_chan;
ioat_chan        1252 drivers/dma/ioat/init.c 		ioat_chan = ioat_dma->idx[i];
ioat_chan        1253 drivers/dma/ioat/init.c 		if (!ioat_chan)
ioat_chan        1256 drivers/dma/ioat/init.c 		spin_lock_bh(&ioat_chan->prep_lock);
ioat_chan        1257 drivers/dma/ioat/init.c 		clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
ioat_chan        1258 drivers/dma/ioat/init.c 		spin_unlock_bh(&ioat_chan->prep_lock);
ioat_chan        1260 drivers/dma/ioat/init.c 		chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan        1261 drivers/dma/ioat/init.c 		writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
ioat_chan         104 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         112 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         115 drivers/dma/ioat/prep.c 	num_descs = ioat_xferlen_to_descs(ioat_chan, len);
ioat_chan         117 drivers/dma/ioat/prep.c 	    ioat_check_space_lock(ioat_chan, num_descs) == 0)
ioat_chan         118 drivers/dma/ioat/prep.c 		idx = ioat_chan->head;
ioat_chan         123 drivers/dma/ioat/prep.c 		size_t copy = min_t(size_t, len, 1 << ioat_chan->xfercap_log);
ioat_chan         125 drivers/dma/ioat/prep.c 		desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         136 drivers/dma/ioat/prep.c 		dump_desc_dbg(ioat_chan, desc);
ioat_chan         144 drivers/dma/ioat/prep.c 	dump_desc_dbg(ioat_chan, desc);
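The memcpy prep fragments above split one request into descriptors of at most 1 << xfercap_log bytes each, advancing src and dst in lockstep. A runnable model of the splitting loop, where emit_desc() stands in for filling a ring entry:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static void emit_desc(uint64_t src, uint64_t dst, size_t size)
{
	printf("desc: src=%#llx dst=%#llx size=%zu\n",
	       (unsigned long long)src, (unsigned long long)dst, size);
}

static unsigned prep_memcpy(uint64_t src, uint64_t dst, size_t len,
			    unsigned xfercap_log)
{
	size_t max = (size_t)1 << xfercap_log;
	unsigned i = 0;

	while (len) {
		size_t copy = len < max ? len : max;  /* min_t equivalent */

		emit_desc(src, dst, copy);
		len -= copy;
		src += copy;
		dst += copy;
		i++;
	}
	return i;       /* number of ring slots consumed */
}

int main(void)
{
	prep_memcpy(0x1000, 0x9000, (1u << 20) + 512, 19); /* 512 KiB cap */
	return 0;
}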
ioat_chan         156 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         170 drivers/dma/ioat/prep.c 	num_descs = ioat_xferlen_to_descs(ioat_chan, len);
ioat_chan         186 drivers/dma/ioat/prep.c 	    ioat_check_space_lock(ioat_chan, num_descs+1) == 0)
ioat_chan         187 drivers/dma/ioat/prep.c 		idx = ioat_chan->head;
ioat_chan         194 drivers/dma/ioat/prep.c 					 len, 1 << ioat_chan->xfercap_log);
ioat_chan         197 drivers/dma/ioat/prep.c 		desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         204 drivers/dma/ioat/prep.c 		ext = ioat_get_ring_ent(ioat_chan, idx + i + 1);
ioat_chan         219 drivers/dma/ioat/prep.c 		dump_desc_dbg(ioat_chan, desc);
ioat_chan         230 drivers/dma/ioat/prep.c 	compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         238 drivers/dma/ioat/prep.c 	dump_desc_dbg(ioat_chan, compl_desc);
ioat_chan         248 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         250 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         261 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         263 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         276 drivers/dma/ioat/prep.c dump_pq_desc_dbg(struct ioatdma_chan *ioat_chan, struct ioat_ring_ent *desc,
ioat_chan         279 drivers/dma/ioat/prep.c 	struct device *dev = to_dev(ioat_chan);
ioat_chan         303 drivers/dma/ioat/prep.c static void dump_pq16_desc_dbg(struct ioatdma_chan *ioat_chan,
ioat_chan         306 drivers/dma/ioat/prep.c 	struct device *dev = to_dev(ioat_chan);
ioat_chan         344 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         345 drivers/dma/ioat/prep.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         358 drivers/dma/ioat/prep.c 	dev_dbg(to_dev(ioat_chan), "%s\n", __func__);
ioat_chan         364 drivers/dma/ioat/prep.c 	num_descs = ioat_xferlen_to_descs(ioat_chan, len);
ioat_chan         382 drivers/dma/ioat/prep.c 	    ioat_check_space_lock(ioat_chan, num_descs + cb32) == 0)
ioat_chan         383 drivers/dma/ioat/prep.c 		idx = ioat_chan->head;
ioat_chan         390 drivers/dma/ioat/prep.c 					 1 << ioat_chan->xfercap_log);
ioat_chan         392 drivers/dma/ioat/prep.c 		desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         399 drivers/dma/ioat/prep.c 		ext = ioat_get_ring_ent(ioat_chan, idx + i + with_ext);
ioat_chan         438 drivers/dma/ioat/prep.c 	dump_pq_desc_dbg(ioat_chan, desc, ext);
ioat_chan         446 drivers/dma/ioat/prep.c 		compl_desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         454 drivers/dma/ioat/prep.c 		dump_desc_dbg(ioat_chan, compl_desc);
ioat_chan         468 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         469 drivers/dma/ioat/prep.c 	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
ioat_chan         480 drivers/dma/ioat/prep.c 	dev_dbg(to_dev(ioat_chan), "%s\n", __func__);
ioat_chan         482 drivers/dma/ioat/prep.c 	num_descs = ioat_xferlen_to_descs(ioat_chan, len);
ioat_chan         488 drivers/dma/ioat/prep.c 	if (num_descs && ioat_check_space_lock(ioat_chan, num_descs) == 0)
ioat_chan         489 drivers/dma/ioat/prep.c 		idx = ioat_chan->head;
ioat_chan         498 drivers/dma/ioat/prep.c 					 1 << ioat_chan->xfercap_log);
ioat_chan         500 drivers/dma/ioat/prep.c 		desc = ioat_get_ring_ent(ioat_chan, idx + i);
ioat_chan         507 drivers/dma/ioat/prep.c 			dev_err(to_dev(ioat_chan),
ioat_chan         557 drivers/dma/ioat/prep.c 	dump_pq16_desc_dbg(ioat_chan, desc);
ioat_chan         578 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         580 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         623 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         625 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         652 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         654 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         679 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(chan);
ioat_chan         681 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         707 drivers/dma/ioat/prep.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         711 drivers/dma/ioat/prep.c 	if (test_bit(IOAT_CHAN_DOWN, &ioat_chan->state))
ioat_chan         714 drivers/dma/ioat/prep.c 	if (ioat_check_space_lock(ioat_chan, 1) == 0)
ioat_chan         715 drivers/dma/ioat/prep.c 		desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head);
ioat_chan         732 drivers/dma/ioat/prep.c 	dump_desc_dbg(ioat_chan, desc);
ioat_chan          45 drivers/dma/ioat/sysfs.c 	struct ioatdma_chan *ioat_chan;
ioat_chan          48 drivers/dma/ioat/sysfs.c 	ioat_chan = container_of(kobj, struct ioatdma_chan, kobj);
ioat_chan          52 drivers/dma/ioat/sysfs.c 	return entry->show(&ioat_chan->dma_chan, page);
ioat_chan          60 drivers/dma/ioat/sysfs.c 	struct ioatdma_chan *ioat_chan;
ioat_chan          63 drivers/dma/ioat/sysfs.c 	ioat_chan = container_of(kobj, struct ioatdma_chan, kobj);
ioat_chan          67 drivers/dma/ioat/sysfs.c 	return entry->store(&ioat_chan->dma_chan, page, count);
ioat_chan          81 drivers/dma/ioat/sysfs.c 		struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan          85 drivers/dma/ioat/sysfs.c 		err = kobject_init_and_add(&ioat_chan->kobj, type,
ioat_chan          88 drivers/dma/ioat/sysfs.c 			dev_warn(to_dev(ioat_chan),
ioat_chan          90 drivers/dma/ioat/sysfs.c 			kobject_put(&ioat_chan->kobj);
ioat_chan          91 drivers/dma/ioat/sysfs.c 			set_bit(IOAT_KOBJ_INIT_FAIL, &ioat_chan->state);
ioat_chan         102 drivers/dma/ioat/sysfs.c 		struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         104 drivers/dma/ioat/sysfs.c 		if (!test_bit(IOAT_KOBJ_INIT_FAIL, &ioat_chan->state)) {
ioat_chan         105 drivers/dma/ioat/sysfs.c 			kobject_del(&ioat_chan->kobj);
ioat_chan         106 drivers/dma/ioat/sysfs.c 			kobject_put(&ioat_chan->kobj);
ioat_chan         113 drivers/dma/ioat/sysfs.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         115 drivers/dma/ioat/sysfs.c 	return sprintf(page, "%d\n", (1 << ioat_chan->alloc_order) & ~1);
ioat_chan         121 drivers/dma/ioat/sysfs.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         124 drivers/dma/ioat/sysfs.c 	return sprintf(page, "%d\n", ioat_ring_active(ioat_chan));
ioat_chan         130 drivers/dma/ioat/sysfs.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         132 drivers/dma/ioat/sysfs.c 	return sprintf(page, "%d\n", ioat_chan->intr_coalesce);
ioat_chan         139 drivers/dma/ioat/sysfs.c 	struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
ioat_chan         145 drivers/dma/ioat/sysfs.c 		ioat_chan->intr_coalesce = intr_coalesce;
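The sysfs show/store hooks above recover the channel from its embedded kobject with container_of(). A minimal illustration of that pattern outside the kernel:

#include <assert.h>
#include <stddef.h>

struct kobj { int refcount; };
struct chan { int id; struct kobj kobj; };

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
	struct chan c = { .id = 7 };

	/* map the embedded member pointer back to its enclosing struct */
	assert(container_of(&c.kobj, struct chan, kobj)->id == 7);
	return 0;
}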