tce               316 arch/powerpc/include/asm/iommu.h extern enum dma_data_direction iommu_tce_direction(unsigned long tce);
tce               190 arch/powerpc/include/asm/kvm_ppc.h 			     unsigned long ioba, unsigned long tce);
tce               754 arch/powerpc/include/asm/kvm_ppc.h 			 unsigned long ioba, unsigned long tce);
tce               961 arch/powerpc/kernel/iommu.c enum dma_data_direction iommu_tce_direction(unsigned long tce)
tce               963 arch/powerpc/kernel/iommu.c 	if ((tce & TCE_PCI_READ) && (tce & TCE_PCI_WRITE))
tce               965 arch/powerpc/kernel/iommu.c 	else if (tce & TCE_PCI_READ)
tce               967 arch/powerpc/kernel/iommu.c 	else if (tce & TCE_PCI_WRITE)
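
The search only returns lines that mention "tce", so the return statements of iommu_tce_direction() are not shown above. A minimal reconstruction of the full body, inferred from the bit tests in the hits; the mapping of bit combinations to dma_data_direction values is an assumption based on the TCE_PCI_READ/TCE_PCI_WRITE semantics, not a quote from the tree:

enum dma_data_direction iommu_tce_direction(unsigned long tce)
{
	/* Both permission bits set: the device may read and write the page. */
	if ((tce & TCE_PCI_READ) && (tce & TCE_PCI_WRITE))
		return DMA_BIDIRECTIONAL;
	/* Device may only read guest memory, i.e. DMA towards the device. */
	else if (tce & TCE_PCI_READ)
		return DMA_TO_DEVICE;
	/* Device may only write guest memory, i.e. DMA from the device. */
	else if (tce & TCE_PCI_WRITE)
		return DMA_FROM_DEVICE;
	/* Neither bit set: a cleared entry, no DMA allowed. */
	else
		return DMA_NONE;
}
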
tce               333 arch/powerpc/kvm/book3s_64_vio.c static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
tce               336 arch/powerpc/kvm/book3s_64_vio.c 	unsigned long gfn = tce >> PAGE_SHIFT;
tce               344 arch/powerpc/kvm/book3s_64_vio.c 		(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));
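
The hits at lines 333-344 of book3s_64_vio.c are from kvmppc_tce_to_ua(), which turns the guest physical address carried in a TCE into a host userspace address through the KVM memslot backing that guest frame, keeping the sub-page offset and stripping the permission bits. A sketch of the likely shape; the memslot helpers (kvm_memslots, search_memslots, __gfn_to_hva_memslot) are the generic KVM API of that era, not taken from the hits themselves:

static long kvmppc_tce_to_ua(struct kvm *kvm, unsigned long tce,
		unsigned long *ua)
{
	unsigned long gfn = tce >> PAGE_SHIFT;
	struct kvm_memory_slot *memslot;

	/* Find the memslot that backs this guest frame, if any. */
	memslot = search_memslots(kvm_memslots(kvm), gfn);
	if (!memslot)
		return -EINVAL;

	/* Userspace base of the frame, plus the offset bits of the TCE
	 * with the read/write permission bits masked out. */
	*ua = __gfn_to_hva_memslot(memslot, gfn) |
		(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));

	return 0;
}
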
tce               350 arch/powerpc/kvm/book3s_64_vio.c 		unsigned long tce)
tce               352 arch/powerpc/kvm/book3s_64_vio.c 	unsigned long gpa = tce & ~(TCE_PCI_READ | TCE_PCI_WRITE);
tce               353 arch/powerpc/kvm/book3s_64_vio.c 	enum dma_data_direction dir = iommu_tce_direction(tce);
tce               364 arch/powerpc/kvm/book3s_64_vio.c 	if (kvmppc_tce_to_ua(stt->kvm, tce, &ua))
tce               389 arch/powerpc/kvm/book3s_64_vio.c 		unsigned long idx, unsigned long tce)
tce               401 arch/powerpc/kvm/book3s_64_vio.c 		if (!tce)
tce               410 arch/powerpc/kvm/book3s_64_vio.c 	tbl[idx % TCES_PER_PAGE] = tce;
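
kvmppc_tce_put() (hits at lines 389-410) stores one entry into KVM's shadow copy of the guest TCE table, which is kept as an array of pages holding TCES_PER_PAGE entries each. A condensed sketch of that path; the stt->pages layout is inferred from the indexing in the hits, and the handling of a missing backing page is simplified to an early return (the real code allocates the page on demand when the new entry is non-zero):

static void kvmppc_tce_put(struct kvmppc_spapr_tce_table *stt,
		unsigned long idx, unsigned long tce)
{
	struct page *page;
	u64 *tbl;

	page = stt->pages[idx / TCES_PER_PAGE];
	if (!page) {
		/* Clearing an entry whose backing page was never
		 * allocated is a no-op; otherwise the real code
		 * allocates the page here instead of returning. */
		return;
	}
	tbl = page_to_virt(page);
	tbl[idx % TCES_PER_PAGE] = tce;
}
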
tce               541 arch/powerpc/kvm/book3s_64_vio.c 		      unsigned long ioba, unsigned long tce)
tce               562 arch/powerpc/kvm/book3s_64_vio.c 	ret = kvmppc_tce_validate(stt, tce);
tce               566 arch/powerpc/kvm/book3s_64_vio.c 	dir = iommu_tce_direction(tce);
tce               568 arch/powerpc/kvm/book3s_64_vio.c 	if ((dir != DMA_NONE) && kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) {
tce               591 arch/powerpc/kvm/book3s_64_vio.c 	kvmppc_tce_put(stt, entry, tce);
tce               608 arch/powerpc/kvm/book3s_64_vio.c 	u64 tce;
tce               638 arch/powerpc/kvm/book3s_64_vio.c 		if (get_user(tce, tces + i)) {
tce               642 arch/powerpc/kvm/book3s_64_vio.c 		tce = be64_to_cpu(tce);
tce               644 arch/powerpc/kvm/book3s_64_vio.c 		ret = kvmppc_tce_validate(stt, tce);
tce               660 arch/powerpc/kvm/book3s_64_vio.c 		if (get_user(tce, tces + i)) {
tce               664 arch/powerpc/kvm/book3s_64_vio.c 		tce = be64_to_cpu(tce);
tce               666 arch/powerpc/kvm/book3s_64_vio.c 		if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua)) {
tce               674 arch/powerpc/kvm/book3s_64_vio.c 					iommu_tce_direction(tce));
tce               683 arch/powerpc/kvm/book3s_64_vio.c 		kvmppc_tce_put(stt, entry + i, tce);
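
The H_PUT_TCE_INDIRECT handler (hits at lines 608-683) works through a guest-supplied list of TCEs in two passes: the first pass validates every entry, the second pass re-reads each entry with get_user(), translates it, maps it into any attached host IOMMU tables and stores it with kvmppc_tce_put(). A condensed sketch of just that two-pass structure; the function name is hypothetical, and locking, bounds checks and the per-table mapping loop are reduced to comments:

static long h_put_tce_indirect_sketch(struct kvm_vcpu *vcpu,
		struct kvmppc_spapr_tce_table *stt,
		u64 __user *tces, unsigned long entry,
		unsigned long npages)
{
	unsigned long i, ua = 0;
	u64 tce;

	/* Pass 1: reject the whole request if any TCE is malformed. */
	for (i = 0; i < npages; ++i) {
		if (get_user(tce, tces + i))
			return H_TOO_HARD;
		tce = be64_to_cpu(tce);
		if (kvmppc_tce_validate(stt, tce) != H_SUCCESS)
			return H_PARAMETER;
	}

	/* Pass 2: re-read each entry (the guest may change the list under
	 * us; the translation below is what actually protects the host),
	 * translate it and store it. */
	for (i = 0; i < npages; ++i) {
		if (get_user(tce, tces + i))
			return H_TOO_HARD;
		tce = be64_to_cpu(tce);
		if (kvmppc_tce_to_ua(vcpu->kvm, tce, &ua))
			return H_PARAMETER;
		/* ... map (ua, iommu_tce_direction(tce)) into each
		 * attached host IOMMU table here ... */
		kvmppc_tce_put(stt, entry + i, tce);
	}

	return H_SUCCESS;
}
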
tce                78 arch/powerpc/kvm/book3s_64_vio_hv.c static long kvmppc_rm_tce_to_ua(struct kvm *kvm, unsigned long tce,
tce                81 arch/powerpc/kvm/book3s_64_vio_hv.c 	unsigned long gfn = tce >> PAGE_SHIFT;
tce                89 arch/powerpc/kvm/book3s_64_vio_hv.c 		(tce & ~(PAGE_MASK | TCE_PCI_READ | TCE_PCI_WRITE));
tce               106 arch/powerpc/kvm/book3s_64_vio_hv.c 		unsigned long tce)
tce               108 arch/powerpc/kvm/book3s_64_vio_hv.c 	unsigned long gpa = tce & ~(TCE_PCI_READ | TCE_PCI_WRITE);
tce               109 arch/powerpc/kvm/book3s_64_vio_hv.c 	enum dma_data_direction dir = iommu_tce_direction(tce);
tce               120 arch/powerpc/kvm/book3s_64_vio_hv.c 	if (kvmppc_rm_tce_to_ua(stt->kvm, tce, &ua, NULL))
tce               172 arch/powerpc/kvm/book3s_64_vio_hv.c 		unsigned long idx, unsigned long tce)
tce               186 arch/powerpc/kvm/book3s_64_vio_hv.c 	tbl[idx % TCES_PER_PAGE] = tce;
tce               387 arch/powerpc/kvm/book3s_64_vio_hv.c 		unsigned long ioba, unsigned long tce)
tce               406 arch/powerpc/kvm/book3s_64_vio_hv.c 	ret = kvmppc_rm_ioba_validate(stt, ioba, 1, tce == 0);
tce               410 arch/powerpc/kvm/book3s_64_vio_hv.c 	ret = kvmppc_rm_tce_validate(stt, tce);
tce               414 arch/powerpc/kvm/book3s_64_vio_hv.c 	dir = iommu_tce_direction(tce);
tce               415 arch/powerpc/kvm/book3s_64_vio_hv.c 	if ((dir != DMA_NONE) && kvmppc_rm_tce_to_ua(vcpu->kvm, tce, &ua, NULL))
tce               436 arch/powerpc/kvm/book3s_64_vio_hv.c 	kvmppc_rm_tce_put(stt, entry, tce);
tce               558 arch/powerpc/kvm/book3s_64_vio_hv.c 		unsigned long tce = be64_to_cpu(((u64 *)tces)[i]);
tce               560 arch/powerpc/kvm/book3s_64_vio_hv.c 		ret = kvmppc_rm_tce_validate(stt, tce);
tce               566 arch/powerpc/kvm/book3s_64_vio_hv.c 		unsigned long tce = be64_to_cpu(((u64 *)tces)[i]);
tce               569 arch/powerpc/kvm/book3s_64_vio_hv.c 		if (kvmppc_rm_tce_to_ua(vcpu->kvm, tce, &ua, NULL)) {
tce               577 arch/powerpc/kvm/book3s_64_vio_hv.c 					iommu_tce_direction(tce));
tce               586 arch/powerpc/kvm/book3s_64_vio_hv.c 		kvmppc_rm_tce_put(stt, entry + i, tce);
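
book3s_64_vio_hv.c carries real-mode twins of the same helpers (kvmppc_rm_tce_to_ua, kvmppc_rm_tce_validate, kvmppc_rm_tce_put and the H_PUT_TCE/H_PUT_TCE_INDIRECT handlers), used when the hypercall is served without switching out of real mode. The visible difference in the indirect path is that the TCE list is not read with get_user(): its guest address is translated once, and the entries are then read directly, as in this condensed sketch (the function name is hypothetical and the surrounding checks and mapping calls are elided):

static long rm_apply_tce_list_sketch(struct kvm *kvm,
		struct kvmppc_spapr_tce_table *stt, u64 *tces,
		unsigned long entry, unsigned long npages)
{
	unsigned long i, ua = 0;

	for (i = 0; i < npages; ++i) {
		unsigned long tce = be64_to_cpu(((u64 *)tces)[i]);

		if (kvmppc_rm_tce_validate(stt, tce) != H_SUCCESS)
			return H_PARAMETER;
	}

	for (i = 0; i < npages; ++i) {
		unsigned long tce = be64_to_cpu(((u64 *)tces)[i]);

		if (kvmppc_rm_tce_to_ua(kvm, tce, &ua, NULL))
			return H_PARAMETER;
		/* ... real-mode mapping into attached host IOMMU tables,
		 * using iommu_tce_direction(tce), elided ... */
		kvmppc_rm_tce_put(stt, entry + i, tce);
	}

	return H_SUCCESS;
}
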
tce               289 arch/powerpc/kvm/book3s_pr_papr.c 	unsigned long tce = kvmppc_get_gpr(vcpu, 6);
tce               292 arch/powerpc/kvm/book3s_pr_papr.c 	rc = kvmppc_h_put_tce(vcpu, liobn, ioba, tce);
tce               303 arch/powerpc/kvm/book3s_pr_papr.c 	unsigned long tce = kvmppc_get_gpr(vcpu, 6);
tce               308 arch/powerpc/kvm/book3s_pr_papr.c 			tce, npages);
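
In book3s_pr_papr.c (hits at lines 289-308) the PAPR hcall arguments arrive in GPRs: r4 carries the LIOBN, r5 the IOBA and r6 the TCE value, with r6/r7 carrying the extra value and count arguments of the indirect and stuff variants, and the return code goes back to the guest in r3. A sketch of the single-put dispatcher consistent with those hits; the EMULATE_* handling of H_TOO_HARD is from memory rather than from the hits:

static int kvmppc_h_pr_put_tce(struct kvm_vcpu *vcpu)
{
	unsigned long liobn = kvmppc_get_gpr(vcpu, 4);
	unsigned long ioba = kvmppc_get_gpr(vcpu, 5);
	unsigned long tce = kvmppc_get_gpr(vcpu, 6);
	long rc;

	rc = kvmppc_h_put_tce(vcpu, liobn, ioba, tce);
	if (rc == H_TOO_HARD)
		return EMULATE_FAIL;
	/* PAPR returns the hcall status to the guest in r3. */
	kvmppc_set_gpr(vcpu, 3, rc);
	return EMULATE_DONE;
}
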
tce                64 arch/powerpc/platforms/powernv/pci-ioda-tce.c 		unsigned long oldtce, tce = be64_to_cpu(READ_ONCE(tmp[n]));
tce                66 arch/powerpc/platforms/powernv/pci-ioda-tce.c 		if (!tce) {
tce                77 arch/powerpc/platforms/powernv/pci-ioda-tce.c 			tce = __pa(tmp2) | TCE_PCI_READ | TCE_PCI_WRITE;
tce                79 arch/powerpc/platforms/powernv/pci-ioda-tce.c 					cpu_to_be64(tce)));
tce                83 arch/powerpc/platforms/powernv/pci-ioda-tce.c 				tce = oldtce;
tce                87 arch/powerpc/platforms/powernv/pci-ioda-tce.c 		tmp = __va(tce & ~(TCE_PCI_READ | TCE_PCI_WRITE));
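
The powernv hits (pci-ioda-tce.c lines 64-87) come from the walk of a multi-level TCE table: an entry either points to the next level or is empty, and an empty slot is filled locklessly with cmpxchg() so concurrent mappers need no table lock. A condensed sketch of that allocation race for a single level; alloc_tce_level() and free_tce_level() are hypothetical stand-ins for the real allocation helpers, whose names are not visible in the hits:

static __be64 *tce_walk_level_sketch(__be64 *tmp, int n)
{
	unsigned long oldtce, tce = be64_to_cpu(READ_ONCE(tmp[n]));

	if (!tce) {
		__be64 *tmp2 = alloc_tce_level();	/* hypothetical */

		if (!tmp2)
			return NULL;

		/* Publish the new level only if the slot is still empty. */
		tce = __pa(tmp2) | TCE_PCI_READ | TCE_PCI_WRITE;
		oldtce = be64_to_cpu(cmpxchg(&tmp[n], 0, cpu_to_be64(tce)));
		if (oldtce) {
			/* Lost the race: drop ours, use the winner's. */
			free_tce_level(tmp2);		/* hypothetical */
			tce = oldtce;
		}
	}

	/* Strip the permission bits to get the next level's address. */
	return __va(tce & ~(TCE_PCI_READ | TCE_PCI_WRITE));
}
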
tce               144 arch/powerpc/platforms/pseries/iommu.c 	u64 proto_tce, tce;
tce               155 arch/powerpc/platforms/pseries/iommu.c 		tce = proto_tce | (rpn & TCE_RPN_MASK) << tceshift;
tce               156 arch/powerpc/platforms/pseries/iommu.c 		rc = plpar_tce_put((u64)liobn, (u64)tcenum << tceshift, tce);
tce               169 arch/powerpc/platforms/pseries/iommu.c 			printk("\ttce val = 0x%llx\n", tce );
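
The pseries hits at lines 144-169 are from the H_PUT_TCE build path: a prototype TCE holding the permission bits is combined with each real page number and handed to firmware one entry at a time through the plpar_tce_put() hypercall. A condensed sketch of that loop; the function name is hypothetical and the retry, fallback and ratelimited error reporting of the real code (the printk hit above) are elided:

static int build_tces_sketch(unsigned long liobn, long tcenum, long tceshift,
		long npages, unsigned long uaddr,
		enum dma_data_direction direction)
{
	u64 proto_tce, tce, rpn;
	long rc;

	/* Permission bits shared by every entry of this mapping. */
	proto_tce = TCE_PCI_READ;
	if (direction != DMA_TO_DEVICE)
		proto_tce |= TCE_PCI_WRITE;

	rpn = __pa(uaddr) >> tceshift;

	while (npages--) {
		/* Combine the permissions with the page number, shifted
		 * into place, and hand the entry to the hypervisor. */
		tce = proto_tce | (rpn & TCE_RPN_MASK) << tceshift;
		rc = plpar_tce_put((u64)liobn, (u64)tcenum << tceshift, tce);
		if (rc)
			return (int)rc;

		rpn++;
		tcenum++;
	}

	return 0;
}
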
tce               639 arch/powerpc/platforms/pseries/iommu.c 				long *tce, enum dma_data_direction *direction,
tce               646 arch/powerpc/platforms/pseries/iommu.c 	unsigned long newtce = *tce | proto_tce;
tce               656 arch/powerpc/platforms/pseries/iommu.c 		*tce = oldtce & ~(TCE_PCI_READ | TCE_PCI_WRITE);
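
The hits at lines 639-656 belong to the TCE exchange callback used on pseries when VFIO takes over a table: *tce and *direction are in/out parameters, carrying the new mapping on entry and the previous entry's address and direction on return, so the caller can release whatever was mapped there before. A sketch of that contract built on plpar_tce_get() and plpar_tce_put(); the locking and real-mode handling of the actual callback are left out and the function name is hypothetical:

static long tce_exchange_sketch(struct iommu_table *tbl, long index,
		unsigned long *tce, enum dma_data_direction *direction)
{
	unsigned long ioba = (unsigned long)index << tbl->it_page_shift;
	u64 proto_tce = iommu_direction_to_tce_perm(*direction);
	unsigned long newtce = *tce | proto_tce;
	unsigned long oldtce = 0;
	long rc;

	rc = plpar_tce_get((u64)tbl->it_index, ioba, &oldtce);
	if (!rc)
		rc = plpar_tce_put((u64)tbl->it_index, ioba, newtce);
	if (!rc) {
		/* Hand the previous mapping back to the caller. */
		*direction = iommu_tce_direction(oldtce);
		*tce = oldtce & ~(TCE_PCI_READ | TCE_PCI_WRITE);
	}

	return rc;
}
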
tce              3311 drivers/net/ethernet/broadcom/cnic_defs.h 	struct ustorm_fcoe_tce tce;
tce              3602 drivers/net/ethernet/broadcom/cnic_defs.h 	struct xstorm_fcoe_tce tce;
tce               390 drivers/vfio/vfio_iommu_spapr_tce.c 		unsigned long tce, unsigned long shift,
tce               396 drivers/vfio/vfio_iommu_spapr_tce.c 	mem = mm_iommu_lookup(container->mm, tce, 1ULL << shift);
tce               400 drivers/vfio/vfio_iommu_spapr_tce.c 	ret = mm_iommu_ua_to_hpa(mem, tce, shift, phpa);
tce               484 drivers/vfio/vfio_iommu_spapr_tce.c static int tce_iommu_use_page(unsigned long tce, unsigned long *hpa)
tce               487 drivers/vfio/vfio_iommu_spapr_tce.c 	enum dma_data_direction direction = iommu_tce_direction(tce);
tce               489 drivers/vfio/vfio_iommu_spapr_tce.c 	if (get_user_pages_fast(tce & PAGE_MASK, 1,
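
tce_iommu_use_page() (hits at lines 484-489) resolves a TCE given as a userspace address in the non-preregistered case: it pins the single backing page with get_user_pages_fast() and returns its host physical address. A sketch of the likely body; the gup flags argument and the final __pa() conversion are reconstructed rather than quoted:

static int tce_iommu_use_page(unsigned long tce, unsigned long *hpa)
{
	struct page *page = NULL;
	enum dma_data_direction direction = iommu_tce_direction(tce);

	/* Pin exactly one page; request write access unless the device
	 * will only ever read from it. */
	if (get_user_pages_fast(tce & PAGE_MASK, 1,
			direction != DMA_TO_DEVICE ? FOLL_WRITE : 0,
			&page) != 1)
		return -EFAULT;

	*hpa = __pa((unsigned long)page_address(page));

	return 0;
}
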
tce               501 drivers/vfio/vfio_iommu_spapr_tce.c 		unsigned long entry, unsigned long tce, unsigned long pages,
tce               509 drivers/vfio/vfio_iommu_spapr_tce.c 		unsigned long offset = tce & IOMMU_PAGE_MASK(tbl) & ~PAGE_MASK;
tce               511 drivers/vfio/vfio_iommu_spapr_tce.c 		ret = tce_iommu_use_page(tce, &hpa);
tce               529 drivers/vfio/vfio_iommu_spapr_tce.c 					tce, ret);
tce               536 drivers/vfio/vfio_iommu_spapr_tce.c 		tce += IOMMU_PAGE_SIZE(tbl);
tce               549 drivers/vfio/vfio_iommu_spapr_tce.c 		unsigned long entry, unsigned long tce, unsigned long pages,
tce               561 drivers/vfio/vfio_iommu_spapr_tce.c 				tce, tbl->it_page_shift, &hpa, &mem);
tce               572 drivers/vfio/vfio_iommu_spapr_tce.c 		hpa |= tce & IOMMU_PAGE_MASK(tbl) & ~PAGE_MASK;
tce               586 drivers/vfio/vfio_iommu_spapr_tce.c 					tce, ret);
tce               593 drivers/vfio/vfio_iommu_spapr_tce.c 		*pua = cpu_to_be64(tce);
tce               595 drivers/vfio/vfio_iommu_spapr_tce.c 		tce += IOMMU_PAGE_SIZE(tbl);
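
The v2 build loop (hits at lines 549-595) relies on preregistered memory instead of pinning pages on the fly: each TCE is looked up in the registered regions, the sub-IOMMU-page offset is preserved, the host physical address is exchanged into the hardware table, and the original userspace address is remembered in the table's userspace view so it can be unpinned later. A condensed sketch of the loop; the mapped-reference accounting, containment check and error unwinding of the real code are elided, and tce_iommu_prereg_ua_to_hpa() (the wrapper around the mm_iommu_lookup/mm_iommu_ua_to_hpa hits above) and iommu_tce_xchg() are named from memory:

static long build_v2_sketch(struct tce_container *container,
		struct iommu_table *tbl, unsigned long entry,
		unsigned long tce, unsigned long pages,
		enum dma_data_direction direction)
{
	struct mm_iommu_table_group_mem_t *mem = NULL;
	unsigned long i, hpa = 0;
	long ret = 0;

	for (i = 0; i < pages; ++i) {
		__be64 *pua = IOMMU_TABLE_USERSPACE_ENTRY(tbl, entry + i);
		enum dma_data_direction dirtmp = direction;

		/* Translate via the preregistered regions; nothing is
		 * pinned here, registration already did that. */
		ret = tce_iommu_prereg_ua_to_hpa(container, tce,
				tbl->it_page_shift, &hpa, &mem);
		if (ret)
			break;

		/* Keep the offset of "tce" within the IOMMU page. */
		hpa |= tce & IOMMU_PAGE_MASK(tbl) & ~PAGE_MASK;

		/* Install the mapping; on success dirtmp reports what was
		 * there before so the previous page can be released. */
		ret = iommu_tce_xchg(container->mm, tbl, entry + i,
				&hpa, &dirtmp);
		if (ret)
			break;

		/* Remember the userspace address for later unpinning. */
		*pua = cpu_to_be64(tce);

		tce += IOMMU_PAGE_SIZE(tbl);
	}

	return ret;
}
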