eng               236 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	const unsigned eng = 17;
eng               252 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 			tmp = RREG32_NO_KIQ(hub->vm_inv_eng0_sem + eng);
eng               262 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	WREG32_NO_KIQ(hub->vm_inv_eng0_req + eng, inv_req);
eng               269 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 		RREG32_NO_KIQ(hub->vm_inv_eng0_req + eng);
eng               273 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 		tmp = RREG32_NO_KIQ(hub->vm_inv_eng0_ack + eng);
eng               288 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 		WREG32_NO_KIQ(hub->vm_inv_eng0_sem + eng, 0);
eng               374 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	unsigned eng = ring->vm_inv_eng;
eng               388 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 					  hub->vm_inv_eng0_sem + eng, 0x1, 0x1);
eng               396 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 	amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req + eng,
eng               397 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 					    hub->vm_inv_eng0_ack + eng,
eng               407 drivers/gpu/drm/amd/amdgpu/gmc_v10_0.c 		amdgpu_ring_emit_wreg(ring, hub->vm_inv_eng0_sem + eng, 0);
eng               489 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	const unsigned eng = 17;
eng               504 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		uint32_t req = hub->vm_inv_eng0_req + eng;
eng               505 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		uint32_t ack = hub->vm_inv_eng0_ack + eng;
eng               525 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 			tmp = RREG32_NO_KIQ(hub->vm_inv_eng0_sem + eng);
eng               535 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	WREG32_NO_KIQ(hub->vm_inv_eng0_req + eng, inv_req);
eng               542 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		RREG32_NO_KIQ(hub->vm_inv_eng0_req + eng);
eng               545 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		tmp = RREG32_NO_KIQ(hub->vm_inv_eng0_ack + eng);
eng               557 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		WREG32_NO_KIQ(hub->vm_inv_eng0_sem + eng, 0);
eng               574 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	unsigned eng = ring->vm_inv_eng;
eng               587 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 					  hub->vm_inv_eng0_sem + eng, 0x1, 0x1);
eng               595 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	amdgpu_ring_emit_reg_write_reg_wait(ring, hub->vm_inv_eng0_req + eng,
eng               596 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 					    hub->vm_inv_eng0_ack + eng,
eng               605 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 		amdgpu_ring_emit_wreg(ring, hub->vm_inv_eng0_sem + eng, 0);
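
The gmc_v10_0.c and gmc_v9_0.c hits above all follow one pattern: a VM invalidation engine index (`eng`, fixed at 17 for the CPU-driven path, or `ring->vm_inv_eng` when the flush is emitted on a ring) is added to the hub's `vm_inv_eng0_*` base registers to reach that engine's SEM/REQ/ACK slots. Below is a minimal standalone sketch (not kernel code; the fake `regs[]` array, register indices, and helper names are illustrative) modeling only the request/poll-ack part of that sequence:

        #include <stdint.h>
        #include <stdio.h>

        #define VM_INV_ENG0_REQ  0x10   /* illustrative register indices */
        #define VM_INV_ENG0_ACK  0x30

        static uint32_t regs[0x100];    /* stand-in for the MMIO hub registers */

        /* Fake hardware: acknowledge the same bits that were requested. */
        static void fake_hw_ack(unsigned eng)
        {
                regs[VM_INV_ENG0_ACK + eng] = regs[VM_INV_ENG0_REQ + eng];
        }

        /* Request a flush of `vmid` on invalidation engine `eng`, then poll ACK. */
        static int flush_vm(unsigned eng, unsigned vmid)
        {
                uint32_t req = 1u << vmid;
                int i;

                regs[VM_INV_ENG0_REQ + eng] = req;      /* WREG32(...req + eng, inv_req) */
                fake_hw_ack(eng);                       /* real hardware sets the ack bit */

                for (i = 0; i < 100; i++) {             /* RREG32(...ack + eng) poll loop */
                        if (regs[VM_INV_ENG0_ACK + eng] & (1u << vmid))
                                return 0;
                }
                return -1;
        }

        int main(void)
        {
                const unsigned eng = 17;        /* engine reserved for the driver path */
                printf("flush on eng %u: %d\n", eng, flush_vm(eng, 0));
                return 0;
        }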
eng               120 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_kmap(chan->eng);
eng               121 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x00, 0x00000000);
eng               122 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x04, 0x00000000);
eng               123 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x08, 0x00000000);
eng               124 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x0c, 0x00000000);
eng               125 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x10, 0x00000000);
eng               126 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x14, 0x00000000);
eng               127 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_done(chan->eng);
eng               147 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_kmap(chan->eng);
eng               148 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x00, 0x00190000);
eng               149 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x04, lower_32_bits(limit));
eng               150 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x08, lower_32_bits(start));
eng               151 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x0c, upper_32_bits(limit) << 24 |
eng               153 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x10, 0x00000000);
eng               154 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_wo32(chan->eng, offset + 0x14, 0x00000000);
eng               155 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 	nvkm_done(chan->eng);
eng               264 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c 			      &chan->eng);
eng                88 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_kmap(chan->eng);
eng                89 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_wo32(chan->eng, offset + 0x00, 0x00000000);
eng                90 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_wo32(chan->eng, offset + 0x04, 0x00000000);
eng                91 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_wo32(chan->eng, offset + 0x08, 0x00000000);
eng                92 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_wo32(chan->eng, offset + 0x0c, 0x00000000);
eng                93 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_wo32(chan->eng, offset + 0x10, 0x00000000);
eng                94 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_wo32(chan->eng, offset + 0x14, 0x00000000);
eng                95 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 		nvkm_done(chan->eng);
eng               116 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_kmap(chan->eng);
eng               117 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_wo32(chan->eng, offset + 0x00, 0x00190000);
eng               118 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_wo32(chan->eng, offset + 0x04, lower_32_bits(limit));
eng               119 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_wo32(chan->eng, offset + 0x08, lower_32_bits(start));
eng               120 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_wo32(chan->eng, offset + 0x0c, upper_32_bits(limit) << 24 |
eng               122 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_wo32(chan->eng, offset + 0x10, 0x00000000);
eng               123 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_wo32(chan->eng, offset + 0x14, 0x00000000);
eng               124 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_done(chan->eng);
eng               211 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 	nvkm_gpuobj_del(&chan->eng);
eng               258 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c 			      &chan->eng);
eng                14 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.h 	struct nvkm_gpuobj *eng;
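
In chang84.c and channv50.c, `chan->eng` is an `nvkm_gpuobj` holding one 0x18-byte context slot per engine: bind writes 0x00190000 plus the context's limit/start address split across three words, unbind zeroes the slot. A standalone sketch of that layout (plain memory instead of a gpuobj; the split of the upper address bytes at offset 0x0c is an assumption, since that source line is truncated in the listing above):

        #include <stdint.h>
        #include <stdio.h>
        #include <string.h>

        static uint32_t eng_ctx[0x60 / 4];   /* stand-in for the chan->eng gpuobj */

        static void wo32(uint32_t off, uint32_t val)
        {
                eng_ctx[off / 4] = val;
        }

        /* Bind an engine context covering [start, limit] into slot `offset`. */
        static void engine_bind(uint32_t offset, uint64_t start, uint64_t limit)
        {
                wo32(offset + 0x00, 0x00190000);
                wo32(offset + 0x04, (uint32_t)limit);            /* lower_32_bits(limit) */
                wo32(offset + 0x08, (uint32_t)start);            /* lower_32_bits(start) */
                wo32(offset + 0x0c, (uint32_t)(limit >> 32) << 24 |
                                    (uint32_t)(start >> 32));    /* assumed continuation */
                wo32(offset + 0x10, 0x00000000);
                wo32(offset + 0x14, 0x00000000);
        }

        /* Unbind: clear all six words of the slot, as in the zeroing writes above. */
        static void engine_unbind(uint32_t offset)
        {
                memset(&eng_ctx[offset / 4], 0, 6 * sizeof(uint32_t));
        }

        int main(void)
        {
                engine_bind(0x00, 0x100000ull, 0x1fffffull);
                printf("word0 = 0x%08x\n", eng_ctx[0]);
                engine_unbind(0x00);
                return 0;
        }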
eng                43 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	struct gf100_clk_info eng[16];
eng               276 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	struct gf100_clk_info *info = &clk->eng[idx];
eng               346 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	struct gf100_clk_info *info = &clk->eng[idx];
eng               368 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	struct gf100_clk_info *info = &clk->eng[idx];
eng               395 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	struct gf100_clk_info *info = &clk->eng[idx];
eng               410 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	struct gf100_clk_info *info = &clk->eng[idx];
eng               431 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 		for (j = 0; j < ARRAY_SIZE(clk->eng); j++) {
eng               432 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 			if (!clk->eng[j].freq)
eng               445 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c 	memset(clk->eng, 0x00, sizeof(clk->eng));
eng                43 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info eng[16];
eng               290 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info *info = &clk->eng[idx];
eng               359 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info *info = &clk->eng[idx];
eng               388 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info *info = &clk->eng[idx];
eng               413 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info *info = &clk->eng[idx];
eng               424 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info *info = &clk->eng[idx];
eng               439 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	struct gk104_clk_info *info = &clk->eng[idx];
eng               466 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 		for (j = 0; j < ARRAY_SIZE(clk->eng); j++) {
eng               469 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 			if (!clk->eng[j].freq)
eng               482 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c 	memset(clk->eng, 0x00, sizeof(clk->eng));
eng                36 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info eng[nv_clk_src_max];
eng               278 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 				 &clk->eng[dom]);
eng               289 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *info = &clk->eng[nv_clk_src_host];
eng               365 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *info = &clk->eng[dom];
eng               409 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *info = &clk->eng[dom];
eng               417 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *info = &clk->eng[nv_clk_src_host];
eng               445 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *info = &clk->eng[dom];
eng               462 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *core = &clk->eng[nv_clk_src_core];
eng               477 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 				     &clk->eng[nv_clk_src_core_intm]);
eng               489 drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c 	struct gt215_clk_info *core = &clk->eng[nv_clk_src_core];
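
gf100.c, gk104.c and gt215.c each keep a per-clock-domain scratch array named `eng` (`eng[16]` or `eng[nv_clk_src_max]`): the calc step fills `clk->eng[idx]`, prog walks the array skipping entries whose `.freq` is zero, and tidy memsets it back to zero. A standalone sketch of that calc/prog/tidy shape (struct fields, divider math, and the domain count are illustrative):

        #include <stdio.h>
        #include <string.h>

        struct clk_info {
                unsigned freq;     /* 0 means "domain not programmed this cycle" */
                unsigned coef;
        };

        struct clk {
                struct clk_info eng[16];   /* one slot per clock domain, like clk->eng[] */
        };

        static void calc(struct clk *clk, int idx, unsigned khz)
        {
                struct clk_info *info = &clk->eng[idx];   /* same lookup as the hits above */
                info->freq = khz;
                info->coef = khz / 27000;                 /* placeholder divider math */
        }

        static void prog(struct clk *clk)
        {
                size_t j;
                for (j = 0; j < sizeof(clk->eng) / sizeof(clk->eng[0]); j++) {
                        if (!clk->eng[j].freq)            /* skip untouched domains */
                                continue;
                        printf("domain %zu -> %u kHz (coef %u)\n",
                               j, clk->eng[j].freq, clk->eng[j].coef);
                }
        }

        static void tidy(struct clk *clk)
        {
                memset(clk->eng, 0x00, sizeof(clk->eng));
        }

        int main(void)
        {
                struct clk clk = { 0 };
                calc(&clk, 0, 405000);
                prog(&clk);
                tidy(&clk);
                return 0;
        }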
eng               552 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_regs.h #define    CN23XX_DPI_DMA_ENG_ENB(eng) (CN23XX_DPI_DMA_ENG0_ENB + ((eng) * 8))
eng               565 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_regs.h #define    CN23XX_DPI_DMA_ENG_BUF(eng)   \
eng               566 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_regs.h 		(CN23XX_DPI_DMA_ENG0_BUF + ((eng) * 8))
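
The two liquidio macros compute per-DMA-engine register addresses from an ENG0 base with an 8-byte stride. A standalone check of that arithmetic (the base values below are made-up placeholders; only the stride comes from the macros in the listing):

        #include <stdio.h>

        /* Placeholder bases; the real CN23XX_DPI_DMA_ENG0_* values live in the header. */
        #define CN23XX_DPI_DMA_ENG0_ENB 0x0080ull
        #define CN23XX_DPI_DMA_ENG0_BUF 0x0880ull

        #define CN23XX_DPI_DMA_ENG_ENB(eng) (CN23XX_DPI_DMA_ENG0_ENB + ((eng) * 8))
        #define CN23XX_DPI_DMA_ENG_BUF(eng) (CN23XX_DPI_DMA_ENG0_BUF + ((eng) * 8))

        int main(void)
        {
                int eng;
                for (eng = 0; eng < 6; eng++)
                        printf("eng %d: ENB @ 0x%llx, BUF @ 0x%llx\n", eng,
                               CN23XX_DPI_DMA_ENG_ENB(eng), CN23XX_DPI_DMA_ENG_BUF(eng));
                return 0;
        }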
eng              2010 drivers/net/ethernet/qlogic/qed/qed_debug.c 			     fw_info.ver.num.eng) < 0)
eng               668 drivers/net/ethernet/qlogic/qed/qed_dev.c 	enum qed_eng eng;
eng               681 drivers/net/ethernet/qlogic/qed/qed_dev.c 		eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0;
eng               682 drivers/net/ethernet/qlogic/qed/qed_dev.c 		rc = qed_llh_set_roce_affinity(cdev, eng);
eng               692 drivers/net/ethernet/qlogic/qed/qed_dev.c 			   eng);
eng               697 drivers/net/ethernet/qlogic/qed/qed_dev.c 		eng = cdev->fir_affin ? QED_ENG1 : QED_ENG0;
eng               699 drivers/net/ethernet/qlogic/qed/qed_dev.c 		eng = QED_BOTH_ENG;
eng               702 drivers/net/ethernet/qlogic/qed/qed_dev.c 		rc = qed_llh_set_ppfid_affinity(cdev, ppfid, eng);
eng               713 drivers/net/ethernet/qlogic/qed/qed_dev.c 		   eng);
eng               764 drivers/net/ethernet/qlogic/qed/qed_dev.c int qed_llh_set_ppfid_affinity(struct qed_dev *cdev, u8 ppfid, enum qed_eng eng)
eng               782 drivers/net/ethernet/qlogic/qed/qed_dev.c 	switch (eng) {
eng               793 drivers/net/ethernet/qlogic/qed/qed_dev.c 		DP_NOTICE(cdev, "Invalid affinity value for ppfid [%d]\n", eng);
eng               805 drivers/net/ethernet/qlogic/qed/qed_dev.c 		cdev->iwarp_affin = (eng == QED_ENG1) ? 1 : 0;
eng               812 drivers/net/ethernet/qlogic/qed/qed_dev.c int qed_llh_set_roce_affinity(struct qed_dev *cdev, enum qed_eng eng)
eng               826 drivers/net/ethernet/qlogic/qed/qed_dev.c 	switch (eng) {
eng               839 drivers/net/ethernet/qlogic/qed/qed_dev.c 		DP_NOTICE(cdev, "Invalid affinity value for RoCE [%d]\n", eng);
eng               403 drivers/net/ethernet/qlogic/qed/qed_dev_api.h 			       u8 ppfid, enum qed_eng eng);
eng               413 drivers/net/ethernet/qlogic/qed/qed_dev_api.h int qed_llh_set_roce_affinity(struct qed_dev *cdev, enum qed_eng eng);
eng              2693 drivers/net/ethernet/qlogic/qed/qed_hsi.h 	u8 eng;
eng              1919 drivers/net/ethernet/qlogic/qed/qed_rdma.c 	enum qed_eng eng;
eng              1928 drivers/net/ethernet/qlogic/qed/qed_rdma.c 		eng = QED_BOTH_ENG;
eng              1930 drivers/net/ethernet/qlogic/qed/qed_rdma.c 		eng = cdev->l2_affin_hint ? QED_ENG1 : QED_ENG0;
eng              1932 drivers/net/ethernet/qlogic/qed/qed_rdma.c 	rc = qed_llh_set_ppfid_affinity(cdev, ppfid, eng);
eng              1942 drivers/net/ethernet/qlogic/qed/qed_rdma.c 		   eng);
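
In qed_dev.c and qed_rdma.c, `eng` is an `enum qed_eng` chosen from an affinity hint (`fir_affin`, `l2_affin_hint`) and passed to `qed_llh_set_roce_affinity()` / `qed_llh_set_ppfid_affinity()`, with "both engines" as a distinct value. A standalone sketch of that selection logic (the enum mirrors QED_ENG0/QED_ENG1/QED_BOTH_ENG; the struct and decision details are illustrative):

        #include <stdbool.h>
        #include <stdio.h>

        enum eng { ENG0, ENG1, BOTH_ENG };   /* mirrors QED_ENG0/QED_ENG1/QED_BOTH_ENG */

        struct dev {
                bool fir_affin;        /* storage/RoCE engine hint */
                bool l2_affin_hint;    /* L2 engine hint */
        };

        /* RoCE goes to exactly one engine, picked by the FIR affinity hint. */
        static enum eng roce_engine(const struct dev *cdev)
        {
                return cdev->fir_affin ? ENG1 : ENG0;
        }

        /* Per-PPFID affinity: a single engine when hinted, otherwise both engines. */
        static enum eng ppfid_engine(const struct dev *cdev, bool single)
        {
                if (single)
                        return cdev->l2_affin_hint ? ENG1 : ENG0;
                return BOTH_ENG;
        }

        int main(void)
        {
                struct dev cdev = { .fir_affin = true, .l2_affin_hint = false };
                printf("roce -> %d, ppfid -> %d\n",
                       roce_engine(&cdev), ppfid_engine(&cdev, false));
                return 0;
        }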
eng                29 drivers/staging/kpc2000/kpc_dma/dma.c 	struct kpc_dma_device *eng = container_of(ws, struct kpc_dma_device, irq_work);
eng                31 drivers/staging/kpc2000/kpc_dma/dma.c 	lock_engine(eng);
eng                33 drivers/staging/kpc2000/kpc_dma/dma.c 	if (GetEngineCompletePtr(eng) == 0)
eng                36 drivers/staging/kpc2000/kpc_dma/dma.c 	if (eng->desc_completed->MyDMAAddr == GetEngineCompletePtr(eng))
eng                39 drivers/staging/kpc2000/kpc_dma/dma.c 	cur = eng->desc_completed;
eng                42 drivers/staging/kpc2000/kpc_dma/dma.c 		dev_dbg(&eng->pldev->dev, "Handling completed descriptor %p (acd = %p)\n", cur, cur->acd);
eng                43 drivers/staging/kpc2000/kpc_dma/dma.c 		BUG_ON(cur == eng->desc_next); // Ordering failure.
eng                46 drivers/staging/kpc2000/kpc_dma/dma.c 			eng->accumulated_bytes = 0;
eng                47 drivers/staging/kpc2000/kpc_dma/dma.c 			eng->accumulated_flags = 0;
eng                50 drivers/staging/kpc2000/kpc_dma/dma.c 		eng->accumulated_bytes += cur->DescByteCount;
eng                52 drivers/staging/kpc2000/kpc_dma/dma.c 			eng->accumulated_flags |= ACD_FLAG_ENG_ACCUM_ERROR;
eng                55 drivers/staging/kpc2000/kpc_dma/dma.c 			eng->accumulated_flags |= ACD_FLAG_ENG_ACCUM_SHORT;
eng                59 drivers/staging/kpc2000/kpc_dma/dma.c 				transfer_complete_cb(cur->acd, eng->accumulated_bytes, eng->accumulated_flags | ACD_FLAG_DONE);
eng                62 drivers/staging/kpc2000/kpc_dma/dma.c 		eng->desc_completed = cur;
eng                63 drivers/staging/kpc2000/kpc_dma/dma.c 	} while (cur->MyDMAAddr != GetEngineCompletePtr(eng));
eng                66 drivers/staging/kpc2000/kpc_dma/dma.c 	SetClearEngineControl(eng, ENG_CTL_IRQ_ACTIVE, 0);
eng                68 drivers/staging/kpc2000/kpc_dma/dma.c 	unlock_engine(eng);
eng                72 drivers/staging/kpc2000/kpc_dma/dma.c void  start_dma_engine(struct kpc_dma_device *eng)
eng                74 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_next       = eng->desc_pool_first;
eng                75 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_completed  = eng->desc_pool_last;
eng                78 drivers/staging/kpc2000/kpc_dma/dma.c 	SetEngineNextPtr(eng, eng->desc_pool_first);
eng                79 drivers/staging/kpc2000/kpc_dma/dma.c 	SetEngineSWPtr(eng, eng->desc_pool_first);
eng                80 drivers/staging/kpc2000/kpc_dma/dma.c 	ClearEngineCompletePtr(eng);
eng                82 drivers/staging/kpc2000/kpc_dma/dma.c 	WriteEngineControl(eng, ENG_CTL_DMA_ENABLE | ENG_CTL_IRQ_ENABLE);
eng                85 drivers/staging/kpc2000/kpc_dma/dma.c int  setup_dma_engine(struct kpc_dma_device *eng, u32 desc_cnt)
eng                95 drivers/staging/kpc2000/kpc_dma/dma.c 	caps = GetEngineCapabilities(eng);
eng                97 drivers/staging/kpc2000/kpc_dma/dma.c 	if (WARN(!(caps & ENG_CAP_PRESENT), "%s() called for DMA Engine at %p which isn't present in hardware!\n", __func__, eng))
eng               101 drivers/staging/kpc2000/kpc_dma/dma.c 		eng->dir = DMA_FROM_DEVICE;
eng               103 drivers/staging/kpc2000/kpc_dma/dma.c 		eng->dir = DMA_TO_DEVICE;
eng               106 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_pool_cnt = desc_cnt;
eng               107 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_pool = dma_pool_create("KPC DMA Descriptors", &eng->pldev->dev, sizeof(struct kpc_dma_descriptor), DMA_DESC_ALIGNMENT, 4096);
eng               109 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_pool_first = dma_pool_alloc(eng->desc_pool, GFP_KERNEL | GFP_DMA, &head_handle);
eng               110 drivers/staging/kpc2000/kpc_dma/dma.c 	if (!eng->desc_pool_first) {
eng               111 drivers/staging/kpc2000/kpc_dma/dma.c 		dev_err(&eng->pldev->dev, "%s: couldn't allocate desc_pool_first!\n", __func__);
eng               112 drivers/staging/kpc2000/kpc_dma/dma.c 		dma_pool_destroy(eng->desc_pool);
eng               116 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_pool_first->MyDMAAddr = head_handle;
eng               117 drivers/staging/kpc2000/kpc_dma/dma.c 	clear_desc(eng->desc_pool_first);
eng               119 drivers/staging/kpc2000/kpc_dma/dma.c 	cur = eng->desc_pool_first;
eng               120 drivers/staging/kpc2000/kpc_dma/dma.c 	for (i = 1 ; i < eng->desc_pool_cnt ; i++) {
eng               121 drivers/staging/kpc2000/kpc_dma/dma.c 		next = dma_pool_alloc(eng->desc_pool, GFP_KERNEL | GFP_DMA, &next_handle);
eng               135 drivers/staging/kpc2000/kpc_dma/dma.c 	cur->Next = eng->desc_pool_first;
eng               136 drivers/staging/kpc2000/kpc_dma/dma.c 	cur->DescNextDescPtr = eng->desc_pool_first->MyDMAAddr;
eng               138 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_pool_last = cur;
eng               139 drivers/staging/kpc2000/kpc_dma/dma.c 	eng->desc_completed = eng->desc_pool_last;
eng               142 drivers/staging/kpc2000/kpc_dma/dma.c 	INIT_WORK(&eng->irq_work, ndd_irq_worker);
eng               145 drivers/staging/kpc2000/kpc_dma/dma.c 	rv = request_irq(eng->irq, ndd_irq_handler, IRQF_SHARED, KP_DRIVER_NAME_DMA_CONTROLLER, eng);
eng               147 drivers/staging/kpc2000/kpc_dma/dma.c 		dev_err(&eng->pldev->dev, "%s: failed to request_irq: %d\n", __func__, rv);
eng               152 drivers/staging/kpc2000/kpc_dma/dma.c 	start_dma_engine(eng);
eng               153 drivers/staging/kpc2000/kpc_dma/dma.c 	unlock_engine(eng);
eng               158 drivers/staging/kpc2000/kpc_dma/dma.c void  stop_dma_engine(struct kpc_dma_device *eng)
eng               163 drivers/staging/kpc2000/kpc_dma/dma.c 	WriteEngineControl(eng, 0);
eng               167 drivers/staging/kpc2000/kpc_dma/dma.c 	while (GetEngineControl(eng) & ENG_CTL_DMA_RUNNING) {
eng               169 drivers/staging/kpc2000/kpc_dma/dma.c 			dev_crit(&eng->pldev->dev, "DMA_RUNNING still asserted!\n");
eng               175 drivers/staging/kpc2000/kpc_dma/dma.c 	WriteEngineControl(eng, ENG_CTL_DMA_RESET_REQUEST);
eng               179 drivers/staging/kpc2000/kpc_dma/dma.c 	while (GetEngineControl(eng) & (ENG_CTL_DMA_RUNNING | ENG_CTL_DMA_RESET_REQUEST)) {
eng               181 drivers/staging/kpc2000/kpc_dma/dma.c 			dev_crit(&eng->pldev->dev, "ENG_CTL_DMA_RESET_REQUEST still asserted!\n");
eng               187 drivers/staging/kpc2000/kpc_dma/dma.c 	WriteEngineControl(eng, ENG_CTL_DMA_RESET);
eng               191 drivers/staging/kpc2000/kpc_dma/dma.c 	while (GetEngineControl(eng) & ENG_CTL_DMA_RESET) {
eng               193 drivers/staging/kpc2000/kpc_dma/dma.c 			dev_crit(&eng->pldev->dev, "DMA_RESET still asserted!\n");
eng               199 drivers/staging/kpc2000/kpc_dma/dma.c 	SetClearEngineControl(eng, (ENG_CTL_IRQ_ACTIVE | ENG_CTL_DESC_COMPLETE | ENG_CTL_DESC_ALIGN_ERR | ENG_CTL_DESC_FETCH_ERR | ENG_CTL_SW_ABORT_ERR | ENG_CTL_DESC_CHAIN_END | ENG_CTL_DMA_WAITING_PERSIST), 0);
eng               204 drivers/staging/kpc2000/kpc_dma/dma.c 	WriteEngineControl(eng, 0);
eng               207 drivers/staging/kpc2000/kpc_dma/dma.c void  destroy_dma_engine(struct kpc_dma_device *eng)
eng               213 drivers/staging/kpc2000/kpc_dma/dma.c 	stop_dma_engine(eng);
eng               215 drivers/staging/kpc2000/kpc_dma/dma.c 	cur = eng->desc_pool_first;
eng               216 drivers/staging/kpc2000/kpc_dma/dma.c 	cur_handle = eng->desc_pool_first->MyDMAAddr;
eng               218 drivers/staging/kpc2000/kpc_dma/dma.c 	for (i = 0 ; i < eng->desc_pool_cnt ; i++) {
eng               222 drivers/staging/kpc2000/kpc_dma/dma.c 		dma_pool_free(eng->desc_pool, cur, cur_handle);
eng               227 drivers/staging/kpc2000/kpc_dma/dma.c 	dma_pool_destroy(eng->desc_pool);
eng               229 drivers/staging/kpc2000/kpc_dma/dma.c 	free_irq(eng->irq, eng);
eng               233 drivers/staging/kpc2000/kpc_dma/dma.c int  count_descriptors_available(struct kpc_dma_device *eng)
eng               236 drivers/staging/kpc2000/kpc_dma/dma.c 	struct kpc_dma_descriptor *cur = eng->desc_next;
eng               238 drivers/staging/kpc2000/kpc_dma/dma.c 	while (cur != eng->desc_completed) {
eng               273 drivers/staging/kpc2000/kpc_dma/fileops.c 	struct kpc_dma_device *eng = priv->ldev;
eng               275 drivers/staging/kpc2000/kpc_dma/fileops.c 	lock_engine(eng);
eng               277 drivers/staging/kpc2000/kpc_dma/fileops.c 	stop_dma_engine(eng);
eng               279 drivers/staging/kpc2000/kpc_dma/fileops.c 	cur = eng->desc_completed->Next;
eng               280 drivers/staging/kpc2000/kpc_dma/fileops.c 	while (cur != eng->desc_next) {
eng               281 drivers/staging/kpc2000/kpc_dma/fileops.c 		dev_dbg(&eng->pldev->dev, "Aborting descriptor %p (acd = %p)\n", cur, cur->acd);
eng               288 drivers/staging/kpc2000/kpc_dma/fileops.c 		eng->desc_completed = cur;
eng               293 drivers/staging/kpc2000/kpc_dma/fileops.c 	start_dma_engine(eng);
eng               295 drivers/staging/kpc2000/kpc_dma/fileops.c 	unlock_engine(eng);
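
The kpc_dma hits in dma.c and fileops.c show the engine taken with lock_engine(), the circular descriptor list walked from desc_completed up to the hardware's complete pointer, and then released with unlock_engine(). A standalone sketch of that walk over a small circular list (descriptor fields and the completion reporting are simplified stand-ins):

        #include <stdio.h>

        struct desc {
                unsigned addr;          /* stands in for MyDMAAddr */
                struct desc *next;
        };

        /* Walk forward from the last completed descriptor up to and including the
         * one the hardware reports at its complete pointer, like ndd_irq_worker. */
        static struct desc *reap_completed(struct desc *completed, unsigned hw_complete)
        {
                struct desc *cur = completed;

                if (completed->addr == hw_complete)
                        return completed;               /* nothing new to handle */

                do {
                        cur = cur->next;
                        printf("handling descriptor 0x%x\n", cur->addr);
                        completed = cur;                /* eng->desc_completed = cur */
                } while (cur->addr != hw_complete);

                return completed;
        }

        int main(void)
        {
                struct desc d[4] = {
                        { 0x10, &d[1] }, { 0x20, &d[2] }, { 0x30, &d[3] }, { 0x40, &d[0] },
                };
                struct desc *done = reap_completed(&d[0], 0x30);
                printf("desc_completed now 0x%x\n", done->addr);
                return 0;
        }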
eng               147 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h u32  GetEngineCapabilities(struct kpc_dma_device *eng)
eng               149 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	return readl(eng->eng_regs + 0);
eng               153 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  WriteEngineControl(struct kpc_dma_device *eng, u32 value)
eng               155 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	writel(value, eng->eng_regs + 1);
eng               159 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h u32  GetEngineControl(struct kpc_dma_device *eng)
eng               161 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	return readl(eng->eng_regs + 1);
eng               165 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  SetClearEngineControl(struct kpc_dma_device *eng, u32 set_bits, u32 clear_bits)
eng               167 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	u32 val = GetEngineControl(eng);
eng               171 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	WriteEngineControl(eng, val);
eng               175 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  SetEngineNextPtr(struct kpc_dma_device *eng, struct kpc_dma_descriptor *desc)
eng               177 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	writel(desc->MyDMAAddr, eng->eng_regs + 2);
eng               181 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  SetEngineSWPtr(struct kpc_dma_device *eng, struct kpc_dma_descriptor *desc)
eng               183 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	writel(desc->MyDMAAddr, eng->eng_regs + 3);
eng               187 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  ClearEngineCompletePtr(struct kpc_dma_device *eng)
eng               189 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	writel(0, eng->eng_regs + 4);
eng               193 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h u32  GetEngineCompletePtr(struct kpc_dma_device *eng)
eng               195 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	return readl(eng->eng_regs + 4);
eng               199 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  lock_engine(struct kpc_dma_device *eng)
eng               201 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	BUG_ON(eng == NULL);
eng               202 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	mutex_lock(&eng->sem);
eng               206 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  unlock_engine(struct kpc_dma_device *eng)
eng               208 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	BUG_ON(eng == NULL);
eng               209 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h 	mutex_unlock(&eng->sem);
eng               213 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  start_dma_engine(struct kpc_dma_device *eng);
eng               214 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h int   setup_dma_engine(struct kpc_dma_device *eng, u32 desc_cnt);
eng               215 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  stop_dma_engine(struct kpc_dma_device *eng);
eng               216 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h void  destroy_dma_engine(struct kpc_dma_device *eng);
eng               218 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h int   count_descriptors_available(struct kpc_dma_device *eng);
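
The inline helpers in kpc_dma_driver.h treat eng->eng_regs as a u32 pointer, so + 1 through + 4 step in 32-bit registers: 0 = capabilities, 1 = control, 2 = next-descriptor pointer, 3 = software pointer, 4 = complete pointer. A standalone sketch using a plain array in place of the MMIO window to show that indexing (register names are taken from the accessor names above; the control bit values are illustrative):

        #include <stdint.h>
        #include <stdio.h>

        /* Index of each 32-bit register relative to eng_regs, per the accessors above. */
        enum { REG_CAPS = 0, REG_CONTROL = 1, REG_NEXT = 2, REG_SWPTR = 3, REG_COMPLETE = 4 };

        struct engine {
                uint32_t regs[8];   /* stand-in for the u32 __iomem *eng_regs window */
        };

        static uint32_t eng_read(struct engine *eng, int reg)
        {
                return eng->regs[reg];
        }

        static void eng_write(struct engine *eng, int reg, uint32_t v)
        {
                eng->regs[reg] = v;
        }

        int main(void)
        {
                struct engine eng = { { 0 } };

                eng_write(&eng, REG_CONTROL, 0x3);   /* e.g. DMA_ENABLE | IRQ_ENABLE */
                eng_write(&eng, REG_COMPLETE, 0);    /* like ClearEngineCompletePtr() */

                printf("control = 0x%x, complete = 0x%x\n",
                       eng_read(&eng, REG_CONTROL), eng_read(&eng, REG_COMPLETE));
                return 0;
        }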