drivers/dma/mv_xor.c (Marvell XOR DMA engine driver): occurrences of mv_chan, grouped by function. Lines elided from the cross-reference are marked "...".

mv_chan_start_new_chain() points the engine at a new descriptor chain and kicks it:

static void mv_chan_start_new_chain(struct mv_xor_chan *mv_chan,
                                    struct mv_xor_desc_slot *sw_desc)
{
        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: sw_desc %p\n",
                __func__, __LINE__, sw_desc);

        /* set the hardware chain */
        mv_chan_set_next_descriptor(mv_chan, sw_desc->async_tx.phys);

        mv_chan->pending++;
        mv_xor_issue_pending(&mv_chan->dmachan);
}
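The "kick" ultimately lands in a next-descriptor register write. That helper's body is not part of this listing; a minimal sketch of the usual shape, where XOR_NEXT_DESC() is an assumed per-channel register-offset macro (mv_xor.h style) built on the mmr_base mapping set up in mv_xor_channel_add() below:

#include <linux/io.h>

static void mv_chan_set_next_descriptor(struct mv_xor_chan *chan,
                                        u32 next_desc_addr)
{
        /* hand the engine the physical address of the first descriptor;
         * relaxed is fine here, a later activation write orders it */
        writel_relaxed(next_desc_addr, XOR_NEXT_DESC(chan));
}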
mv_desc_run_tx_complete_actions() takes the channel as its second parameter (the call in mv_chan_slot_cleanup() below shows the full argument list and the cookie return):

static dma_cookie_t
mv_desc_run_tx_complete_actions(struct mv_xor_desc_slot *desc,
                                struct mv_xor_chan *mv_chan,
                                dma_cookie_t cookie)
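Its body is not in this listing. A sketch of what a completion helper of this shape typically does, built only from generic dmaengine-core helpers (the exact body here is an assumption): harvest the cookie, unmap, invoke the client callback, then release dependent transactions.

        if (desc->async_tx.cookie > 0) {
                cookie = desc->async_tx.cookie; /* newest completed cookie */
                dma_descriptor_unmap(&desc->async_tx);
                dmaengine_desc_get_callback_invoke(&desc->async_tx, NULL);
        }
        dma_run_dependencies(&desc->async_tx);  /* unblock dependents */
        return cookie;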
mv_chan_clean_completed_slots() returns acked descriptors from completed_slots to the free list:

static void mv_chan_clean_completed_slots(struct mv_xor_chan *mv_chan)
{
        ...
        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__);
        list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots,
                                 node) {
                ...
                        list_move_tail(&iter->node, &mv_chan->free_slots);
                        ...
                        list_splice_tail_init(&iter->sg_tx_list,
                                              &mv_chan->free_slots);
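The _safe iterator matters here: list_move_tail() re-links iter->node into another list, which would invalidate a plain iterator's notion of "next". A generic illustration (not driver code):

#include <linux/list.h>

struct slot {
        struct list_head node;
};

static void drain(struct list_head *busy, struct list_head *free)
{
        struct slot *iter, *_iter;

        /* _iter caches the next entry before the body runs, so moving
         * iter to another list cannot break the walk */
        list_for_each_entry_safe(iter, _iter, busy, node)
                list_move_tail(&iter->node, free);
}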
mv_desc_clean_slot() decides, per descriptor, whether the slot can be reused immediately:

static void mv_desc_clean_slot(struct mv_xor_desc_slot *desc,
                               struct mv_xor_chan *mv_chan)
{
        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d: desc %p flags %d\n",
                __func__, __LINE__, desc, desc->async_tx.flags);
        ...
                /* not yet acked by the client: park on completed_slots */
                list_move_tail(&desc->node, &mv_chan->completed_slots);
                ...
                list_splice_tail_init(&desc->sg_tx_list,
                                      &mv_chan->completed_slots);
        ...
                /* already acked: straight back to the free list */
                list_move_tail(&desc->node, &mv_chan->free_slots);
                ...
                list_splice_tail_init(&desc->sg_tx_list,
                                      &mv_chan->free_slots);
mv_chan_slot_cleanup() is the main reaping loop (callers hold the channel spinlock):

static void mv_chan_slot_cleanup(struct mv_xor_chan *mv_chan)
{
        ...
        int busy = mv_chan_is_busy(mv_chan);
        u32 current_desc = mv_chan_get_current_desc(mv_chan);
        ...
        dev_dbg(mv_chan_to_devp(mv_chan), "%s %d\n", __func__, __LINE__);
        dev_dbg(mv_chan_to_devp(mv_chan), "current_desc %x\n", current_desc);
        mv_chan_clean_completed_slots(mv_chan);

        /* walk the chain from the oldest descriptor, retiring finished ones */
        list_for_each_entry_safe(iter, _iter, &mv_chan->chain,
                                 node) {
                ...
                cookie = mv_desc_run_tx_complete_actions(iter, mv_chan,
                                                         cookie);
                ...
                mv_desc_clean_slot(iter, mv_chan);
                ...
        }

        /* engine idle but chain not empty: restart it at the right point */
        if ((busy == 0) && !list_empty(&mv_chan->chain)) {
                ...
                iter = list_entry(mv_chan->chain.next,
                                  struct mv_xor_desc_slot, node);
                mv_chan_start_new_chain(mv_chan, iter);
                ...
                if (!list_is_last(&iter->node, &mv_chan->chain)) {
                        ...
                        mv_chan_start_new_chain(mv_chan, iter);
                } else {
                        /* descriptors left to clean: defer to the tasklet */
                        tasklet_schedule(&mv_chan->irq_tasklet);
                }
                ...
        }
        ...
        mv_chan->dmachan.completed_cookie = cookie;
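The tasklet scheduled here is the one initialized in mv_xor_channel_add() below. Its body is not in the listing; a sketch consistent with that tasklet_init() call. Note the lock variants: tasklets already run with bottom halves disabled, so a plain spin_lock() here pairs correctly with the spin_lock_bh() used on the process-context paths.

static void mv_xor_tasklet(unsigned long data)
{
        struct mv_xor_chan *chan = (struct mv_xor_chan *)data;

        spin_lock(&chan->lock);
        mv_chan_slot_cleanup(chan);
        spin_unlock(&chan->lock);
}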
mv_chan_alloc_slot() pops a descriptor from the free list, or schedules cleanup if none is left:

static struct mv_xor_desc_slot *
mv_chan_alloc_slot(struct mv_xor_chan *mv_chan)
{
        ...
        spin_lock_bh(&mv_chan->lock);

        if (!list_empty(&mv_chan->free_slots)) {
                iter = list_first_entry(&mv_chan->free_slots,
                                        struct mv_xor_desc_slot, node);

                list_move_tail(&iter->node, &mv_chan->allocated_slots);

                spin_unlock_bh(&mv_chan->lock);
                ...
        }

        spin_unlock_bh(&mv_chan->lock);

        /* no free slot: kick the cleanup tasklet, caller gets NULL */
        tasklet_schedule(&mv_chan->irq_tasklet);
        ...
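Distilled, this is a fixed-pool allocator whose failure path is "reclaim asynchronously, report no descriptor now". A generic sketch of the pattern, reusing the hypothetical struct slot from above (not the driver's types):

#include <linux/list.h>
#include <linux/spinlock.h>

struct pool {
        spinlock_t lock;
        struct list_head free;
        struct list_head allocated;
};

static struct slot *pool_get(struct pool *p)
{
        struct slot *s = NULL;

        spin_lock_bh(&p->lock);
        if (!list_empty(&p->free)) {
                s = list_first_entry(&p->free, struct slot, node);
                list_move_tail(&s->node, &p->allocated);
        }
        spin_unlock_bh(&p->lock);
        return s;               /* NULL: retry after cleanup has run */
}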
mv_xor_tx_submit() queues a descriptor on the software chain and links it into the running hardware chain:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(tx->chan);
        ...
        dev_dbg(mv_chan_to_devp(mv_chan), ...);
        ...
        spin_lock_bh(&mv_chan->lock);
        ...
        if (list_empty(&mv_chan->chain))
                list_move_tail(&sw_desc->node, &mv_chan->chain);
        else {
                ...
                old_chain_tail = list_entry(mv_chan->chain.prev,
                                            struct mv_xor_desc_slot, node);
                list_move_tail(&sw_desc->node, &mv_chan->chain);

                dev_dbg(mv_chan_to_devp(mv_chan), "Append to last desc %pa\n",
                        ...);
                ...
                /* restart only if the engine stopped exactly at the old tail */
                if (!mv_chan_is_busy(mv_chan)) {
                        u32 current_desc = mv_chan_get_current_desc(mv_chan);
                        ...
                }
        }
        ...
        mv_chan_start_new_chain(mv_chan, sw_desc);

        spin_unlock_bh(&mv_chan->lock);
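The actual hardware link-up is elided between the two dev_dbg lines. The usual shape is to patch the old tail's next-descriptor field in the in-memory hardware descriptor; the struct layout, helper name, and the slot's hw_desc pointer below are all assumptions for illustration:

struct mv_xor_hw_desc {
        u32 status;             /* completion/status bits, written by hw */
        u32 desc_command;
        u32 phy_next_desc;      /* physical address of the next descriptor */
        /* ... byte count, destination, source addresses ... */
};

static void mv_desc_set_next_desc(struct mv_xor_desc_slot *slot,
                                  u32 next_desc_addr)
{
        struct mv_xor_hw_desc *hw = slot->hw_desc;

        hw->phy_next_desc = next_desc_addr;     /* engine follows this link */
}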
mv_xor_alloc_chan_resources() carves the coherent pool into fixed-size descriptor slots:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        ...
        idx = mv_chan->slots_allocated;
        while (...) {
                ...
                        dev_info(mv_chan_to_devp(mv_chan),
                                 "channel only initialized %d descriptor slots",
                                 idx);
                ...
                virt_desc = mv_chan->dma_desc_pool_virt;
                ...
                dma_desc = mv_chan->dma_desc_pool;
                ...
                spin_lock_bh(&mv_chan->lock);
                mv_chan->slots_allocated = idx;
                list_add_tail(&slot->node, &mv_chan->free_slots);
                spin_unlock_bh(&mv_chan->lock);
        }

        dev_dbg(mv_chan_to_devp(mv_chan),
                "allocated %d descriptor slots",
                mv_chan->slots_allocated);

        /* nonzero slot count on success, -ENOMEM if none could be made */
        return mv_chan->slots_allocated ? : -ENOMEM;
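The two pool pointers matter because each slot needs the same descriptor under both views: the CPU's virtual address and the device's bus address. The elided per-slot assignments presumably pair them with one common offset; a sketch under that assumption (MV_XOR_SLOT_SIZE is the driver's, the field pairing is inferred):

        slot->hw_desc = virt_desc + idx * MV_XOR_SLOT_SIZE;       /* CPU view */
        slot->async_tx.phys = dma_desc + idx * MV_XOR_SLOT_SIZE;  /* device view */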
mv_xor_add_io_win() programs an extra MBus window when a buffer lives in PCIe/IO space rather than DRAM:

static int mv_xor_add_io_win(struct mv_xor_chan *mv_chan, u32 addr)
{
        struct mv_xor_device *xordev = mv_chan->xordev;
        void __iomem *base = mv_chan->mmr_high_base;
        ...
mv_xor_prep_dma_xor() builds one XOR descriptor, checking every address against the I/O windows:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        ...
        dev_dbg(mv_chan_to_devp(mv_chan), ...);
        ...
        /* destination may need an MBus window before the engine touches it */
        ret = mv_xor_add_io_win(mv_chan, dest);
        ...
        sw_desc = mv_chan_alloc_slot(mv_chan);
        ...
                if (mv_chan->op_in_desc == XOR_MODE_IN_DESC)
                        ...
                ...
                        ret = mv_xor_add_io_win(mv_chan, src[src_cnt]);
                ...
        dev_dbg(mv_chan_to_devp(mv_chan), ...);
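What happens to the freshly allocated slot is elided. A sketch of the usual fill sequence; mv_desc_init(), mv_desc_set_mode() and mv_desc_set_src_addr() are assumed helpers (only the mode test and the per-source window check appear in the listing):

        sw_desc->type = DMA_XOR;
        sw_desc->async_tx.flags = flags;
        mv_desc_init(sw_desc, dest, len, flags);        /* dest + byte count */
        if (mv_chan->op_in_desc == XOR_MODE_IN_DESC)
                mv_desc_set_mode(sw_desc);      /* op encoded in the descriptor */
        while (src_cnt--) {
                if (mv_xor_add_io_win(mv_chan, src[src_cnt]))
                        return NULL;            /* source not reachable */
                mv_desc_set_src_addr(sw_desc, src_cnt, src[src_cnt]);
        }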
mv_xor_prep_dma_interrupt() has no real "interrupt" operation to program, so it fakes one as a minimum-size copy between the per-channel dummy buffers mapped at probe time:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        ...
        src = mv_chan->dummy_src_addr;
        dest = mv_chan->dummy_dst_addr;
mv_xor_free_chan_resources() drains every list back to free_slots, frees the slots, and complains if any were still in flight:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        ...
        spin_lock_bh(&mv_chan->lock);

        mv_chan_slot_cleanup(mv_chan);

        list_for_each_entry_safe(iter, _iter, &mv_chan->chain,
                                 node) {
                ...
                list_move_tail(&iter->node, &mv_chan->free_slots);
        }
        list_for_each_entry_safe(iter, _iter, &mv_chan->completed_slots,
                                 node) {
                ...
                list_move_tail(&iter->node, &mv_chan->free_slots);
        }
        list_for_each_entry_safe(iter, _iter, &mv_chan->allocated_slots,
                                 node) {
                ...
                list_move_tail(&iter->node, &mv_chan->free_slots);
        }
        list_for_each_entry_safe_reverse(
                iter, _iter, &mv_chan->free_slots, node) {
                ...
                mv_chan->slots_allocated--;
        }

        dev_dbg(mv_chan_to_devp(mv_chan), "%s slots_allocated %d\n",
                __func__, mv_chan->slots_allocated);
        spin_unlock_bh(&mv_chan->lock);
        ...
                dev_err(mv_chan_to_devp(mv_chan),
                        "freeing %d in use descriptors!\n", ...);
mv_xor_status() forces a cleanup pass before reporting:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);
        ...
        spin_lock_bh(&mv_chan->lock);
        mv_chan_slot_cleanup(mv_chan);
        spin_unlock_bh(&mv_chan->lock);
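The surrounding logic is elided. The conventional tx_status shape around these lines, using the generic cookie helpers from the dmaengine core (a sketch, not necessarily the driver's exact code):

        enum dma_status ret;

        ret = dma_cookie_status(chan, cookie, txstate);
        if (ret == DMA_COMPLETE)
                return ret;     /* already observed complete: done */

        spin_lock_bh(&mv_chan->lock);
        mv_chan_slot_cleanup(mv_chan);  /* retire anything that finished */
        spin_unlock_bh(&mv_chan->lock);

        return dma_cookie_status(chan, cookie, txstate);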
mv_xor_issue_pending() only pokes the hardware once the pending count reaches MV_XOR_THRESHOLD:

        struct mv_xor_chan *mv_chan = to_mv_xor_chan(chan);

        if (mv_chan->pending >= MV_XOR_THRESHOLD) {
                mv_chan->pending = 0;
                mv_chan_activate(mv_chan);
        }
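mv_chan_activate() is not in the listing. A sketch of the typical activation step, with XOR_ACTIVATION() as an assumed per-channel register macro (mv_xor.h style):

static void mv_chan_activate(struct mv_xor_chan *chan)
{
        u32 activation;

        /* non-relaxed accessors so all descriptor writes are visible
         * to the engine before the start bit is set */
        activation = readl(XOR_ACTIVATION(chan));
        activation |= 0x1;                      /* channel start */
        writel(activation, XOR_ACTIVATION(chan));
}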
mv_chan_memcpy_self_test() exercises the channel once at probe time:

static int mv_chan_memcpy_self_test(struct mv_xor_chan *mv_chan)
{
        ...
        dma_chan = &mv_chan->dmachan;
mv_chan_xor_self_test() does the same for the XOR path:

static int
mv_chan_xor_self_test(struct mv_xor_chan *mv_chan)
{
        ...
        dma_chan = &mv_chan->dmachan;
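Both tests follow the canonical dmaengine self-test flow; everything but the channel lookup is elided here, so this is a sketch under generic dmaengine assumptions (buffer setup and error handling omitted, and the prep call name is an assumption):

        src_dma = dma_map_single(dma_chan->device->dev, src, PAGE_SIZE,
                                 DMA_TO_DEVICE);
        dest_dma = dma_map_single(dma_chan->device->dev, dest, PAGE_SIZE,
                                  DMA_FROM_DEVICE);

        tx = mv_xor_prep_dma_memcpy(dma_chan, dest_dma, src_dma, PAGE_SIZE, 0);
        cookie = mv_xor_tx_submit(tx);
        mv_xor_issue_pending(dma_chan);
        msleep(1);                      /* give the engine time to finish */

        if (mv_xor_status(dma_chan, cookie, NULL) != DMA_COMPLETE)
                return -ENODEV;         /* transfer never completed */
        /* ...then compare dest against src byte for byte... */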
mv_xor_channel_remove() (teardown mirrors setup: unregister, free the pool, unmap the dummies, drop the IRQ):

static int mv_xor_channel_remove(struct mv_xor_chan *mv_chan)
{
        ...
        struct device *dev = mv_chan->dmadev.dev;

        dma_async_device_unregister(&mv_chan->dmadev);

        dma_free_coherent(dev, MV_XOR_POOL_SIZE,
                          mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool);
        dma_unmap_single(dev, mv_chan->dummy_src_addr,
                         MV_XOR_MIN_BYTE_COUNT, DMA_FROM_DEVICE);
        dma_unmap_single(dev, mv_chan->dummy_dst_addr,
                         MV_XOR_MIN_BYTE_COUNT, DMA_TO_DEVICE);

        list_for_each_entry_safe(chan, _chan, &mv_chan->dmadev.channels,
                                 device_node) {
                ...
        }

        free_irq(mv_chan->irq, mv_chan);
        ...
mv_xor_channel_add() (per-channel probe: map the dummy buffers, allocate the descriptor pool, hook up IRQ and tasklet, then self-test and register):

        struct mv_xor_chan *mv_chan;
        ...
        mv_chan = devm_kzalloc(&pdev->dev, sizeof(*mv_chan), GFP_KERNEL);
        if (!mv_chan)
                return ERR_PTR(-ENOMEM);

        mv_chan->idx = idx;
        mv_chan->irq = irq;
        if (...)
                mv_chan->op_in_desc = XOR_MODE_IN_REG;
        else
                mv_chan->op_in_desc = XOR_MODE_IN_DESC;

        dma_dev = &mv_chan->dmadev;
        ...
        mv_chan->xordev = xordev;
        ...
        /* the dummy buffers back the DMA_INTERRUPT fake-copy, so they are
         * mapped once here instead of per operation */
        mv_chan->dummy_src_addr = dma_map_single(dma_dev->dev,
                mv_chan->dummy_src, MV_XOR_MIN_BYTE_COUNT, DMA_FROM_DEVICE);
        mv_chan->dummy_dst_addr = dma_map_single(dma_dev->dev,
                mv_chan->dummy_dst, MV_XOR_MIN_BYTE_COUNT, DMA_TO_DEVICE);
        ...
        mv_chan->dma_desc_pool_virt =
          dma_alloc_wc(&pdev->dev, MV_XOR_POOL_SIZE, &mv_chan->dma_desc_pool,
                       GFP_KERNEL);
        if (!mv_chan->dma_desc_pool_virt)
                return ERR_PTR(-ENOMEM);
        ...
        mv_chan->mmr_base = xordev->xor_base;
        mv_chan->mmr_high_base = xordev->xor_high_base;
        tasklet_init(&mv_chan->irq_tasklet, mv_xor_tasklet,
                     (unsigned long)mv_chan);

        /* clear errors before enabling interrupts */
        mv_chan_clear_err_status(mv_chan);

        ret = request_irq(mv_chan->irq, mv_xor_interrupt_handler,
                          0, dev_name(&pdev->dev), mv_chan);
        ...
        mv_chan_unmask_interrupts(mv_chan);

        if (mv_chan->op_in_desc == XOR_MODE_IN_DESC)
                mv_chan_set_mode(mv_chan, XOR_OPERATION_MODE_IN_DESC);
        else
                mv_chan_set_mode(mv_chan, XOR_OPERATION_MODE_XOR);

        spin_lock_init(&mv_chan->lock);
        INIT_LIST_HEAD(&mv_chan->chain);
        INIT_LIST_HEAD(&mv_chan->completed_slots);
        INIT_LIST_HEAD(&mv_chan->free_slots);
        INIT_LIST_HEAD(&mv_chan->allocated_slots);
        mv_chan->dmachan.device = dma_dev;
        dma_cookie_init(&mv_chan->dmachan);

        list_add_tail(&mv_chan->dmachan.device_node, &dma_dev->channels);
        ...
                ret = mv_chan_memcpy_self_test(mv_chan);
        ...
                ret = mv_chan_xor_self_test(mv_chan);
        ...
                 mv_chan->op_in_desc ? "Descriptor Mode" : "Registers Mode",
        ...
        return mv_chan;

        /* error unwind */
        ...
        free_irq(mv_chan->irq, mv_chan);
        ...
        dma_free_coherent(&pdev->dev, MV_XOR_POOL_SIZE,
                          mv_chan->dma_desc_pool_virt, mv_chan->dma_desc_pool);
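Between the pool allocation and the mmr_base lines, the listing elides the struct dma_device wiring. A sketch using the standard dmaengine hooks and the handler names that do appear in this listing (the capability checks are assumptions):

        dma_dev->dev = &pdev->dev;
        dma_dev->device_alloc_chan_resources = mv_xor_alloc_chan_resources;
        dma_dev->device_free_chan_resources = mv_xor_free_chan_resources;
        dma_dev->device_tx_status = mv_xor_status;
        dma_dev->device_issue_pending = mv_xor_issue_pending;

        if (dma_has_cap(DMA_XOR, dma_dev->cap_mask))
                dma_dev->device_prep_dma_xor = mv_xor_prep_dma_xor;
        if (dma_has_cap(DMA_INTERRUPT, dma_dev->cap_mask))
                dma_dev->device_prep_dma_interrupt = mv_xor_prep_dma_interrupt;

        /* once the self-tests pass, the device goes live: */
        ret = dma_async_device_register(dma_dev);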
mv_xor_suspend() saves each channel's config and interrupt-mask registers:

                struct mv_xor_chan *mv_chan = xordev->channels[i];

                if (!mv_chan)
                        continue;

                mv_chan->saved_config_reg =
                        readl_relaxed(XOR_CONFIG(mv_chan));
                mv_chan->saved_int_mask_reg =
                        readl_relaxed(XOR_INTR_MASK(mv_chan));
mv_xor_resume() writes them back:

                struct mv_xor_chan *mv_chan = xordev->channels[i];

                if (!mv_chan)
                        continue;

                writel_relaxed(mv_chan->saved_config_reg,
                               XOR_CONFIG(mv_chan));
                writel_relaxed(mv_chan->saved_int_mask_reg,
                               XOR_INTR_MASK(mv_chan));
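These are legacy platform-driver power hooks; the registration itself is outside this listing. A sketch of the usual wiring (the probe name and driver-struct values are assumptions):

static struct platform_driver mv_xor_driver = {
        .probe   = mv_xor_probe,
        .suspend = mv_xor_suspend,
        .resume  = mv_xor_resume,
        .driver  = {
                .name = "mv_xor",
        },
};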