kfd               950 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_kfd_dev		kfd;
kfd               103 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	adev->kfd.dev = kgd2kfd_probe((struct kgd_dev *)adev,
kfd               106 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev)
kfd               148 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev) {
kfd               205 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		kgd2kfd_device_init(adev->kfd.dev, &gpu_resources);
kfd               211 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev) {
kfd               212 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		kgd2kfd_device_exit(adev->kfd.dev);
kfd               213 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		adev->kfd.dev = NULL;
kfd               220 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev)
kfd               221 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		kgd2kfd_interrupt(adev->kfd.dev, ih_ring_entry);
kfd               226 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev)
kfd               227 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		kgd2kfd_suspend(adev->kfd.dev);
kfd               234 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev)
kfd               235 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		r = kgd2kfd_resume(adev->kfd.dev);
kfd               244 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev)
kfd               245 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		r = kgd2kfd_pre_reset(adev->kfd.dev);
kfd               254 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev)
kfd               255 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 		r = kgd2kfd_post_reset(adev->kfd.dev);
kfd               672 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c 	if (adev->kfd.dev) {
kfd               743 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c bool kgd2kfd_device_init(struct kfd_dev *kfd,
kfd               749 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c void kgd2kfd_device_exit(struct kfd_dev *kfd)
kfd               757 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c void kgd2kfd_suspend(struct kfd_dev *kfd)
kfd               761 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c int kgd2kfd_resume(struct kfd_dev *kfd)
kfd               766 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c int kgd2kfd_pre_reset(struct kfd_dev *kfd)
kfd               771 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c int kgd2kfd_post_reset(struct kfd_dev *kfd)
kfd               776 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c void kgd2kfd_interrupt(struct kfd_dev *kfd, const void *ih_ring_entry)
kfd               780 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c void kgd2kfd_set_sram_ecc_flag(struct kfd_dev *kfd)
kfd               244 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h bool kgd2kfd_device_init(struct kfd_dev *kfd,
kfd               246 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void kgd2kfd_device_exit(struct kfd_dev *kfd);
kfd               247 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void kgd2kfd_suspend(struct kfd_dev *kfd);
kfd               248 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h int kgd2kfd_resume(struct kfd_dev *kfd);
kfd               249 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h int kgd2kfd_pre_reset(struct kfd_dev *kfd);
kfd               250 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h int kgd2kfd_post_reset(struct kfd_dev *kfd);
kfd               251 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void kgd2kfd_interrupt(struct kfd_dev *kfd, const void *ih_ring_entry);
kfd               256 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h void kgd2kfd_set_sram_ecc_flag(struct kfd_dev *kfd);
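
The amdgpu_amdkfd.h declarations above are the kgd2kfd entry points that amdgpu calls into, and the amdgpu_amdkfd.c lines near the top of this listing show that every call is guarded on adev->kfd.dev, which kgd2kfd_probe() leaves NULL when no KFD device is present. A minimal sketch of that guard pattern follows; the *_sketch function names are hypothetical, not symbols from the tree:

	#include "amdgpu.h"		/* struct amdgpu_device, adev->kfd */
	#include "amdgpu_amdkfd.h"	/* kgd2kfd_* declarations listed above */

	void amdgpu_amdkfd_suspend_sketch(struct amdgpu_device *adev)
	{
		/* Forward only if kgd2kfd_probe() attached a KFD device. */
		if (adev->kfd.dev)
			kgd2kfd_suspend(adev->kfd.dev);
	}

	int amdgpu_amdkfd_resume_sketch(struct amdgpu_device *adev)
	{
		int r = 0;

		if (adev->kfd.dev)
			r = kgd2kfd_resume(adev->kfd.dev);
		return r;
	}
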
kfd               149 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    (adev->kfd.vram_used + vram_needed >
kfd               155 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		adev->kfd.vram_used += vram_needed;
kfd               181 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			adev->kfd.vram_used -= size;
kfd               182 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			WARN_ONCE(adev->kfd.vram_used < 0,
kfd              5744 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	kgd2kfd_set_sram_ecc_flag(adev->kfd.dev);
kfd               250 drivers/gpu/drm/amd/amdgpu/gmc_v9_0.c 	kgd2kfd_set_sram_ecc_flag(adev->kfd.dev);
kfd              2049 drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c 	kgd2kfd_set_sram_ecc_flag(adev->kfd.dev);
kfd              1009 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		struct kfd_dev *kfd;
kfd              1019 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		kfd = kfd_device_by_id(GET_GPU_ID(args->event_page_offset));
kfd              1020 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		if (!kfd) {
kfd              1026 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		pdd = kfd_bind_process_to_device(kfd, p);
kfd              1042 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		err = amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel(kfd->kgd,
kfd               501 drivers/gpu/drm/amd/amdkfd/kfd_device.c static int kfd_gtt_sa_init(struct kfd_dev *kfd, unsigned int buf_size,
kfd               503 drivers/gpu/drm/amd/amdkfd/kfd_device.c static void kfd_gtt_sa_fini(struct kfd_dev *kfd);
kfd               505 drivers/gpu/drm/amd/amdkfd/kfd_device.c static int kfd_resume(struct kfd_dev *kfd);
kfd               527 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	struct kfd_dev *kfd;
kfd               536 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd = kzalloc(sizeof(*kfd), GFP_KERNEL);
kfd               537 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd)
kfd               544 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->pci_atomic_requested = amdgpu_amdkfd_have_atomics_support(kgd);
kfd               546 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	    !kfd->pci_atomic_requested) {
kfd               550 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfree(kfd);
kfd               554 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->kgd = kgd;
kfd               555 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->device_info = device_info;
kfd               556 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->pdev = pdev;
kfd               557 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->init_complete = false;
kfd               558 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->kfd2kgd = f2g;
kfd               559 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	atomic_set(&kfd->compute_profile, 0);
kfd               561 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_init(&kfd->doorbell_mutex);
kfd               562 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	memset(&kfd->doorbell_available_index, 0,
kfd               563 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		sizeof(kfd->doorbell_available_index));
kfd               565 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	atomic_set(&kfd->sram_ecc_flag, 0);
kfd               567 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	return kfd;
kfd               570 drivers/gpu/drm/amd/amdkfd/kfd_device.c static void kfd_cwsr_init(struct kfd_dev *kfd)
kfd               572 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (cwsr_enable && kfd->device_info->supports_cwsr) {
kfd               573 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		if (kfd->device_info->asic_family < CHIP_VEGA10) {
kfd               575 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa = cwsr_trap_gfx8_hex;
kfd               576 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa_size = sizeof(cwsr_trap_gfx8_hex);
kfd               577 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		} else if (kfd->device_info->asic_family == CHIP_ARCTURUS) {
kfd               579 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa = cwsr_trap_arcturus_hex;
kfd               580 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa_size = sizeof(cwsr_trap_arcturus_hex);
kfd               581 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		} else if (kfd->device_info->asic_family < CHIP_NAVI10) {
kfd               583 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa = cwsr_trap_gfx9_hex;
kfd               584 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa_size = sizeof(cwsr_trap_gfx9_hex);
kfd               587 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa = cwsr_trap_gfx10_hex;
kfd               588 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->cwsr_isa_size = sizeof(cwsr_trap_gfx10_hex);
kfd               591 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd->cwsr_enabled = true;
kfd               595 drivers/gpu/drm/amd/amdkfd/kfd_device.c bool kgd2kfd_device_init(struct kfd_dev *kfd,
kfd               600 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->mec_fw_version = amdgpu_amdkfd_get_fw_version(kfd->kgd,
kfd               602 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->sdma_fw_version = amdgpu_amdkfd_get_fw_version(kfd->kgd,
kfd               604 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->shared_resources = *gpu_resources;
kfd               606 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->vm_info.first_vmid_kfd = ffs(gpu_resources->compute_vmid_bitmap)-1;
kfd               607 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->vm_info.last_vmid_kfd = fls(gpu_resources->compute_vmid_bitmap)-1;
kfd               608 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->vm_info.vmid_num_kfd = kfd->vm_info.last_vmid_kfd
kfd               609 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			- kfd->vm_info.first_vmid_kfd + 1;
kfd               613 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			|| (hws_max_conc_proc > kfd->vm_info.vmid_num_kfd)) {
kfd               616 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			hws_max_conc_proc, kfd->vm_info.vmid_num_kfd,
kfd               617 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->vm_info.vmid_num_kfd);
kfd               618 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd->max_proc_per_quantum = kfd->vm_info.vmid_num_kfd;
kfd               620 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd->max_proc_per_quantum = hws_max_conc_proc;
kfd               623 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (hws_gws_support && amdgpu_amdkfd_alloc_gws(kfd->kgd,
kfd               624 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			amdgpu_amdkfd_get_num_gws(kfd->kgd), &kfd->gws)) {
kfd               626 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			amdgpu_amdkfd_get_num_gws(kfd->kgd));
kfd               631 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->device_info->mqd_size_aligned;
kfd               648 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->kgd, size, &kfd->gtt_mem,
kfd               649 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			&kfd->gtt_start_gpu_addr, &kfd->gtt_start_cpu_ptr,
kfd               658 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd_gtt_sa_init(kfd, size, 512) != 0) {
kfd               663 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd_doorbell_init(kfd)) {
kfd               669 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd->kfd2kgd->get_hive_id)
kfd               670 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd->hive_id = kfd->kfd2kgd->get_hive_id(kfd->kgd);
kfd               672 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd_interrupt_init(kfd)) {
kfd               677 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->dqm = device_queue_manager_init(kfd);
kfd               678 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->dqm) {
kfd               683 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd_iommu_device_init(kfd)) {
kfd               688 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_cwsr_init(kfd);
kfd               690 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd_resume(kfd))
kfd               693 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->dbgmgr = NULL;
kfd               695 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd_topology_add_device(kfd)) {
kfd               700 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->init_complete = true;
kfd               701 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	dev_info(kfd_device, "added device %x:%x\n", kfd->pdev->vendor,
kfd               702 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		 kfd->pdev->device);
kfd               705 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd->dqm->sched_policy);
kfd               712 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	device_queue_manager_uninit(kfd->dqm);
kfd               714 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_interrupt_exit(kfd);
kfd               716 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_doorbell_fini(kfd);
kfd               718 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_gtt_sa_fini(kfd);
kfd               720 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	amdgpu_amdkfd_free_gtt_mem(kfd->kgd, kfd->gtt_mem);
kfd               723 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		amdgpu_amdkfd_free_gws(kfd->kgd, kfd->gws);
kfd               726 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd->pdev->vendor, kfd->pdev->device);
kfd               728 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	return kfd->init_complete;
kfd               731 drivers/gpu/drm/amd/amdkfd/kfd_device.c void kgd2kfd_device_exit(struct kfd_dev *kfd)
kfd               733 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd->init_complete) {
kfd               734 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kgd2kfd_suspend(kfd);
kfd               735 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		device_queue_manager_uninit(kfd->dqm);
kfd               736 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd_interrupt_exit(kfd);
kfd               737 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd_topology_remove_device(kfd);
kfd               738 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd_doorbell_fini(kfd);
kfd               739 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		kfd_gtt_sa_fini(kfd);
kfd               740 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		amdgpu_amdkfd_free_gtt_mem(kfd->kgd, kfd->gtt_mem);
kfd               742 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			amdgpu_amdkfd_free_gws(kfd->kgd, kfd->gws);
kfd               745 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfree(kfd);
kfd               748 drivers/gpu/drm/amd/amdkfd/kfd_device.c int kgd2kfd_pre_reset(struct kfd_dev *kfd)
kfd               750 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->init_complete)
kfd               752 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kgd2kfd_suspend(kfd);
kfd               755 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	dqm_lock(kfd->dqm);
kfd               757 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_signal_reset_event(kfd);
kfd               767 drivers/gpu/drm/amd/amdkfd/kfd_device.c int kgd2kfd_post_reset(struct kfd_dev *kfd)
kfd               771 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->init_complete)
kfd               774 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	dqm_unlock(kfd->dqm);
kfd               776 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	ret = kfd_resume(kfd);
kfd               781 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	atomic_set(&kfd->sram_ecc_flag, 0);
kfd               791 drivers/gpu/drm/amd/amdkfd/kfd_device.c void kgd2kfd_suspend(struct kfd_dev *kfd)
kfd               793 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->init_complete)
kfd               800 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->dqm->ops.stop(kfd->dqm);
kfd               802 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_iommu_suspend(kfd);
kfd               805 drivers/gpu/drm/amd/amdkfd/kfd_device.c int kgd2kfd_resume(struct kfd_dev *kfd)
kfd               809 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->init_complete)
kfd               812 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	ret = kfd_resume(kfd);
kfd               824 drivers/gpu/drm/amd/amdkfd/kfd_device.c static int kfd_resume(struct kfd_dev *kfd)
kfd               828 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	err = kfd_iommu_resume(kfd);
kfd               832 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->pdev->vendor, kfd->pdev->device);
kfd               836 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	err = kfd->dqm->ops.start(kfd->dqm);
kfd               840 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->pdev->vendor, kfd->pdev->device);
kfd               847 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd_iommu_suspend(kfd);
kfd               852 drivers/gpu/drm/amd/amdkfd/kfd_device.c void kgd2kfd_interrupt(struct kfd_dev *kfd, const void *ih_ring_entry)
kfd               858 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->init_complete)
kfd               861 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd->device_info->ih_ring_entry_size > sizeof(patched_ihre)) {
kfd               866 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	spin_lock_irqsave(&kfd->interrupt_lock, flags);
kfd               868 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd->interrupts_active
kfd               869 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	    && interrupt_is_wanted(kfd, ih_ring_entry,
kfd               871 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	    && enqueue_ih_ring_entry(kfd,
kfd               873 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		queue_work(kfd->ih_wq, &kfd->interrupt_work);
kfd               875 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	spin_unlock_irqrestore(&kfd->interrupt_lock, flags);
kfd               964 drivers/gpu/drm/amd/amdkfd/kfd_device.c static int kfd_gtt_sa_init(struct kfd_dev *kfd, unsigned int buf_size,
kfd               976 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->gtt_sa_chunk_size = chunk_size;
kfd               977 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->gtt_sa_num_of_chunks = buf_size / chunk_size;
kfd               979 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	num_of_longs = (kfd->gtt_sa_num_of_chunks + BITS_PER_LONG - 1) /
kfd               982 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfd->gtt_sa_bitmap = kcalloc(num_of_longs, sizeof(long), GFP_KERNEL);
kfd               984 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (!kfd->gtt_sa_bitmap)
kfd               988 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			kfd->gtt_sa_num_of_chunks, kfd->gtt_sa_bitmap);
kfd               990 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_init(&kfd->gtt_sa_lock);
kfd               996 drivers/gpu/drm/amd/amdkfd/kfd_device.c static void kfd_gtt_sa_fini(struct kfd_dev *kfd)
kfd               998 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_destroy(&kfd->gtt_sa_lock);
kfd               999 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	kfree(kfd->gtt_sa_bitmap);
kfd              1016 drivers/gpu/drm/amd/amdkfd/kfd_device.c int kfd_gtt_sa_allocate(struct kfd_dev *kfd, unsigned int size,
kfd              1024 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (size > kfd->gtt_sa_num_of_chunks * kfd->gtt_sa_chunk_size)
kfd              1035 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_lock(&kfd->gtt_sa_lock);
kfd              1039 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	found = find_next_zero_bit(kfd->gtt_sa_bitmap,
kfd              1040 drivers/gpu/drm/amd/amdkfd/kfd_device.c 					kfd->gtt_sa_num_of_chunks,
kfd              1046 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (found == kfd->gtt_sa_num_of_chunks)
kfd              1053 drivers/gpu/drm/amd/amdkfd/kfd_device.c 					kfd->gtt_start_gpu_addr,
kfd              1055 drivers/gpu/drm/amd/amdkfd/kfd_device.c 					kfd->gtt_sa_chunk_size);
kfd              1057 drivers/gpu/drm/amd/amdkfd/kfd_device.c 					kfd->gtt_start_cpu_ptr,
kfd              1059 drivers/gpu/drm/amd/amdkfd/kfd_device.c 					kfd->gtt_sa_chunk_size);
kfd              1065 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (size <= kfd->gtt_sa_chunk_size) {
kfd              1067 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		set_bit(found, kfd->gtt_sa_bitmap);
kfd              1072 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	cur_size = size - kfd->gtt_sa_chunk_size;
kfd              1075 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			find_next_zero_bit(kfd->gtt_sa_bitmap,
kfd              1076 drivers/gpu/drm/amd/amdkfd/kfd_device.c 					kfd->gtt_sa_num_of_chunks, ++found);
kfd              1090 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		if (found == kfd->gtt_sa_num_of_chunks)
kfd              1094 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		if (cur_size <= kfd->gtt_sa_chunk_size)
kfd              1097 drivers/gpu/drm/amd/amdkfd/kfd_device.c 			cur_size -= kfd->gtt_sa_chunk_size;
kfd              1108 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		set_bit(found, kfd->gtt_sa_bitmap);
kfd              1111 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_unlock(&kfd->gtt_sa_lock);
kfd              1116 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_unlock(&kfd->gtt_sa_lock);
kfd              1121 drivers/gpu/drm/amd/amdkfd/kfd_device.c int kfd_gtt_sa_free(struct kfd_dev *kfd, struct kfd_mem_obj *mem_obj)
kfd              1132 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_lock(&kfd->gtt_sa_lock);
kfd              1138 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		clear_bit(bit, kfd->gtt_sa_bitmap);
kfd              1140 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	mutex_unlock(&kfd->gtt_sa_lock);
kfd              1146 drivers/gpu/drm/amd/amdkfd/kfd_device.c void kgd2kfd_set_sram_ecc_flag(struct kfd_dev *kfd)
kfd              1148 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (kfd)
kfd              1149 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		atomic_inc(&kfd->sram_ecc_flag);
kfd              1152 drivers/gpu/drm/amd/amdkfd/kfd_device.c void kfd_inc_compute_active(struct kfd_dev *kfd)
kfd              1154 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	if (atomic_inc_return(&kfd->compute_profile) == 1)
kfd              1155 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		amdgpu_amdkfd_set_compute_idle(kfd->kgd, false);
kfd              1158 drivers/gpu/drm/amd/amdkfd/kfd_device.c void kfd_dec_compute_active(struct kfd_dev *kfd)
kfd              1160 drivers/gpu/drm/amd/amdkfd/kfd_device.c 	int count = atomic_dec_return(&kfd->compute_profile);
kfd              1163 drivers/gpu/drm/amd/amdkfd/kfd_device.c 		amdgpu_amdkfd_set_compute_idle(kfd->kgd, true);
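
Taken together, the kgd2kfd_pre_reset()/kgd2kfd_post_reset() bodies above and their amdgpu_amdkfd.c callers earlier in this listing describe how a GPU reset is bracketed: pre-reset suspends KFD and locks its device queue manager, post-reset unlocks it, resumes, and clears the SRAM ECC flag. A hedged sketch of the calling side; the function name is hypothetical and the ASIC reset itself is elided:

	#include "amdgpu.h"
	#include "amdgpu_amdkfd.h"

	int amdgpu_gpu_reset_sketch(struct amdgpu_device *adev)
	{
		int r = 0;

		if (adev->kfd.dev)
			r = kgd2kfd_pre_reset(adev->kfd.dev);	/* suspend KFD, lock its DQM */
		if (r)
			return r;

		/* ... the actual ASIC reset would run here ... */

		if (adev->kfd.dev)
			r = kgd2kfd_post_reset(adev->kfd.dev);	/* unlock DQM, resume, clear ECC flag */
		return r;
	}
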
kfd                52 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c size_t kfd_doorbell_process_slice(struct kfd_dev *kfd)
kfd                54 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	return roundup(kfd->device_info->doorbell_size *
kfd                60 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c int kfd_doorbell_init(struct kfd_dev *kfd)
kfd                73 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 			roundup(kfd->shared_resources.doorbell_start_offset,
kfd                74 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 					kfd_doorbell_process_slice(kfd));
kfd                77 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 			rounddown(kfd->shared_resources.doorbell_aperture_size,
kfd                78 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 					kfd_doorbell_process_slice(kfd));
kfd                83 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 						kfd_doorbell_process_slice(kfd);
kfd                91 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	kfd->doorbell_base = kfd->shared_resources.doorbell_physical_address +
kfd                94 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	kfd->doorbell_id_offset = doorbell_start_offset / sizeof(u32);
kfd                96 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	kfd->doorbell_kernel_ptr = ioremap(kfd->doorbell_base,
kfd                97 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 					   kfd_doorbell_process_slice(kfd));
kfd                99 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	if (!kfd->doorbell_kernel_ptr)
kfd               104 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 			(uintptr_t)kfd->doorbell_base);
kfd               107 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 			kfd->doorbell_id_offset);
kfd               113 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 			(uintptr_t)kfd->doorbell_base);
kfd               116 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 			kfd->shared_resources.doorbell_aperture_size);
kfd               118 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	pr_debug("doorbell kernel address == %p\n", kfd->doorbell_kernel_ptr);
kfd               123 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c void kfd_doorbell_fini(struct kfd_dev *kfd)
kfd               125 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	if (kfd->doorbell_kernel_ptr)
kfd               126 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 		iounmap(kfd->doorbell_kernel_ptr);
kfd               167 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c void __iomem *kfd_get_kernel_doorbell(struct kfd_dev *kfd,
kfd               172 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	mutex_lock(&kfd->doorbell_mutex);
kfd               173 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	inx = find_first_zero_bit(kfd->doorbell_available_index,
kfd               176 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	__set_bit(inx, kfd->doorbell_available_index);
kfd               177 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	mutex_unlock(&kfd->doorbell_mutex);
kfd               182 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	inx *= kfd->device_info->doorbell_size / sizeof(u32);
kfd               188 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	*doorbell_off = kfd->doorbell_id_offset + inx;
kfd               195 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	return kfd->doorbell_kernel_ptr + inx;
kfd               198 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c void kfd_release_kernel_doorbell(struct kfd_dev *kfd, u32 __iomem *db_addr)
kfd               202 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	inx = (unsigned int)(db_addr - kfd->doorbell_kernel_ptr)
kfd               203 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 		* sizeof(u32) / kfd->device_info->doorbell_size;
kfd               205 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	mutex_lock(&kfd->doorbell_mutex);
kfd               206 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	__clear_bit(inx, kfd->doorbell_available_index);
kfd               207 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	mutex_unlock(&kfd->doorbell_mutex);
kfd               228 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c unsigned int kfd_doorbell_id_to_offset(struct kfd_dev *kfd,
kfd               238 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	return kfd->doorbell_id_offset +
kfd               240 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 		* kfd_doorbell_process_slice(kfd) / sizeof(u32) +
kfd               241 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 		doorbell_id * kfd->device_info->doorbell_size / sizeof(u32);
kfd               244 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c uint64_t kfd_get_number_elems(struct kfd_dev *kfd)
kfd               246 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 	uint64_t num_of_elems = (kfd->shared_resources.doorbell_aperture_size -
kfd               247 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 				kfd->shared_resources.doorbell_start_offset) /
kfd               248 drivers/gpu/drm/amd/amdkfd/kfd_doorbell.c 					kfd_doorbell_process_slice(kfd) + 1;
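
The kfd_doorbell.c helpers above hand out doorbell slots from the per-device kernel doorbell page. Below is a sketch of how a kernel queue might use the get/release pair; the second parameter of kfd_get_kernel_doorbell() is assumed to be an out-parameter for the doorbell offset (its declaration is truncated in the listing, but the body writes *doorbell_off), and the kq_ name is hypothetical:

	#include "kfd_priv.h"	/* kfd_get_kernel_doorbell(), kfd_release_kernel_doorbell() */

	static int kq_doorbell_setup_sketch(struct kfd_dev *kfd)
	{
		unsigned int doorbell_off;	/* assumed out-parameter type */
		u32 __iomem *db;

		db = kfd_get_kernel_doorbell(kfd, &doorbell_off);
		if (!db)
			return -ENOMEM;

		/*
		 * The queue would be programmed with doorbell_off and rung by
		 * writing the new write pointer to db (e.g. via writel()).
		 */

		kfd_release_kernel_doorbell(kfd, db);
		return 0;
	}
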
kfd                52 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c int kfd_interrupt_init(struct kfd_dev *kfd)
kfd                56 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	r = kfifo_alloc(&kfd->ih_fifo,
kfd                57 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 		KFD_IH_NUM_ENTRIES * kfd->device_info->ih_ring_entry_size,
kfd                64 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	kfd->ih_wq = alloc_workqueue("KFD IH", WQ_HIGHPRI, 1);
kfd                65 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	if (unlikely(!kfd->ih_wq)) {
kfd                66 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 		kfifo_free(&kfd->ih_fifo);
kfd                70 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	spin_lock_init(&kfd->interrupt_lock);
kfd                72 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	INIT_WORK(&kfd->interrupt_work, interrupt_wq);
kfd                74 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	kfd->interrupts_active = true;
kfd                86 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c void kfd_interrupt_exit(struct kfd_dev *kfd)
kfd                95 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	spin_lock_irqsave(&kfd->interrupt_lock, flags);
kfd                96 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	kfd->interrupts_active = false;
kfd                97 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	spin_unlock_irqrestore(&kfd->interrupt_lock, flags);
kfd               104 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	flush_workqueue(kfd->ih_wq);
kfd               106 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	kfifo_free(&kfd->ih_fifo);
kfd               112 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c bool enqueue_ih_ring_entry(struct kfd_dev *kfd,	const void *ih_ring_entry)
kfd               116 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	count = kfifo_in(&kfd->ih_fifo, ih_ring_entry,
kfd               117 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 				kfd->device_info->ih_ring_entry_size);
kfd               118 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	if (count != kfd->device_info->ih_ring_entry_size) {
kfd               131 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c static bool dequeue_ih_ring_entry(struct kfd_dev *kfd, void *ih_ring_entry)
kfd               135 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	count = kfifo_out(&kfd->ih_fifo, ih_ring_entry,
kfd               136 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 				kfd->device_info->ih_ring_entry_size);
kfd               138 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	WARN_ON(count && count != kfd->device_info->ih_ring_entry_size);
kfd               140 drivers/gpu/drm/amd/amdkfd/kfd_interrupt.c 	return count == kfd->device_info->ih_ring_entry_size;
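
The kfd_interrupt.c lines above implement the usual top-half/bottom-half split: kgd2kfd_interrupt() runs from amdgpu's IH path, copies wanted ring entries into a kfifo under interrupt_lock and queues work on a high-priority workqueue, and the worker drains the fifo in process context. A self-contained sketch of that generic pattern, with hypothetical demo_ names and a fixed entry size standing in for device_info->ih_ring_entry_size:

	#include <linux/errno.h>
	#include <linux/gfp.h>
	#include <linux/kernel.h>
	#include <linux/kfifo.h>
	#include <linux/spinlock.h>
	#include <linux/types.h>
	#include <linux/workqueue.h>

	#define DEMO_ENTRY_SIZE		32	/* stand-in for ih_ring_entry_size */
	#define DEMO_NUM_ENTRIES	8192	/* stand-in for KFD_IH_NUM_ENTRIES */

	struct demo_ih {
		struct kfifo fifo;
		struct workqueue_struct *wq;
		struct work_struct work;
		spinlock_t lock;
		bool active;
	};

	static void demo_ih_worker(struct work_struct *work)
	{
		struct demo_ih *ih = container_of(work, struct demo_ih, work);
		u8 entry[DEMO_ENTRY_SIZE];

		/* Dequeue whole entries only, as dequeue_ih_ring_entry() does above. */
		while (kfifo_out(&ih->fifo, entry, sizeof(entry)) == sizeof(entry)) {
			/* handle one IH ring entry here (event delivery, etc.) */
		}
	}

	static int demo_ih_init(struct demo_ih *ih)
	{
		int r;

		r = kfifo_alloc(&ih->fifo, DEMO_NUM_ENTRIES * DEMO_ENTRY_SIZE,
				GFP_KERNEL);
		if (r)
			return r;

		ih->wq = alloc_workqueue("demo-ih", WQ_HIGHPRI, 1);
		if (!ih->wq) {
			kfifo_free(&ih->fifo);
			return -ENOMEM;
		}

		spin_lock_init(&ih->lock);
		INIT_WORK(&ih->work, demo_ih_worker);
		ih->active = true;
		return 0;
	}

	/* Hard-IRQ side, mirroring kgd2kfd_interrupt() above. */
	static void demo_ih_push(struct demo_ih *ih, const void *entry)
	{
		unsigned long flags;

		spin_lock_irqsave(&ih->lock, flags);
		if (ih->active &&
		    kfifo_in(&ih->fifo, entry, DEMO_ENTRY_SIZE) == DEMO_ENTRY_SIZE)
			queue_work(ih->wq, &ih->work);
		spin_unlock_irqrestore(&ih->lock, flags);
	}
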
kfd                39 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c int kfd_iommu_check_device(struct kfd_dev *kfd)
kfd                44 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	if (!kfd->device_info->needs_iommu_device)
kfd                48 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	err = amd_iommu_device_info(kfd->pdev, &iommu_info);
kfd                60 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c int kfd_iommu_device_init(struct kfd_dev *kfd)
kfd                67 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	top_dev = kfd_topology_device_by_id(kfd->id);
kfd                69 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	if (!kfd->device_info->needs_iommu_device)
kfd                73 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	err = amd_iommu_device_info(kfd->pdev, &iommu_info);
kfd                91 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 			(unsigned int)(1 << kfd->device_info->max_pasid_bits),
kfd               217 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c static int kfd_bind_processes_to_device(struct kfd_dev *kfd)
kfd               228 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 		pdd = kfd_get_process_device_data(kfd, p);
kfd               235 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 		err = amd_iommu_bind_pasid(kfd->pdev, p->pasid,
kfd               258 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c static void kfd_unbind_processes_from_device(struct kfd_dev *kfd)
kfd               268 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 		pdd = kfd_get_process_device_data(kfd, p);
kfd               288 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c void kfd_iommu_suspend(struct kfd_dev *kfd)
kfd               290 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	if (!kfd->device_info->needs_iommu_device)
kfd               293 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	kfd_unbind_processes_from_device(kfd);
kfd               295 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	amd_iommu_set_invalidate_ctx_cb(kfd->pdev, NULL);
kfd               296 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	amd_iommu_set_invalid_ppr_cb(kfd->pdev, NULL);
kfd               297 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	amd_iommu_free_device(kfd->pdev);
kfd               305 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c int kfd_iommu_resume(struct kfd_dev *kfd)
kfd               310 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	if (!kfd->device_info->needs_iommu_device)
kfd               315 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	err = amd_iommu_init_device(kfd->pdev, pasid_limit);
kfd               319 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	amd_iommu_set_invalidate_ctx_cb(kfd->pdev,
kfd               321 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	amd_iommu_set_invalid_ppr_cb(kfd->pdev,
kfd               324 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 	err = kfd_bind_processes_to_device(kfd);
kfd               326 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 		amd_iommu_set_invalidate_ctx_cb(kfd->pdev, NULL);
kfd               327 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 		amd_iommu_set_invalid_ppr_cb(kfd->pdev, NULL);
kfd               328 drivers/gpu/drm/amd/amdkfd/kfd_iommu.c 		amd_iommu_free_device(kfd->pdev);
kfd                30 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h int kfd_iommu_check_device(struct kfd_dev *kfd);
kfd                31 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h int kfd_iommu_device_init(struct kfd_dev *kfd);
kfd                36 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h void kfd_iommu_suspend(struct kfd_dev *kfd);
kfd                37 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h int kfd_iommu_resume(struct kfd_dev *kfd);
kfd                43 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h static inline int kfd_iommu_check_device(struct kfd_dev *kfd)
kfd                47 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h static inline int kfd_iommu_device_init(struct kfd_dev *kfd)
kfd                62 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h static inline void kfd_iommu_suspend(struct kfd_dev *kfd)
kfd                66 drivers/gpu/drm/amd/amdkfd/kfd_iommu.h static inline int kfd_iommu_resume(struct kfd_dev *kfd)
kfd               119 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 	struct kfd_dev *kfd = pm->dqm->dev;
kfd               131 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v10.c 			kfd->max_proc_per_quantum);
kfd               115 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 	struct kfd_dev *kfd = pm->dqm->dev;
kfd               127 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_v9.c 			kfd->max_proc_per_quantum);
kfd               120 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 	struct kfd_dev *kfd = pm->dqm->dev;
kfd               135 drivers/gpu/drm/amd/amdkfd/kfd_kernel_queue_vi.c 			kfd->max_proc_per_quantum);
kfd                69 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager.h 	struct kfd_mem_obj*	(*allocate_mqd)(struct kfd_dev *kfd,
kfd                75 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd,
kfd                80 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c 	if (kfd_gtt_sa_allocate(kfd, sizeof(struct cik_mqd),
kfd                69 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd,
kfd                79 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 	if (kfd->cwsr_enabled && (q->type == KFD_QUEUE_TYPE_COMPUTE)) {
kfd                83 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 		retval = amdgpu_amdkfd_alloc_gtt_mem(kfd->kgd,
kfd                90 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 		retval = kfd_gtt_sa_allocate(kfd, sizeof(struct v10_compute_mqd),
kfd               253 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 	struct kfd_dev *kfd = mm->dev;
kfd               256 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v10.c 		amdgpu_amdkfd_free_gtt_mem(kfd->kgd, mqd_mem_obj->gtt_mem);
kfd                84 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd,
kfd                94 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 	if (kfd->cwsr_enabled && (q->type == KFD_QUEUE_TYPE_COMPUTE)) {
kfd                98 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 		retval = amdgpu_amdkfd_alloc_gtt_mem(kfd->kgd,
kfd               105 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 		retval = kfd_gtt_sa_allocate(kfd, sizeof(struct v9_mqd),
kfd               272 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 	struct kfd_dev *kfd = mm->dev;
kfd               275 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c 		amdgpu_amdkfd_free_gtt_mem(kfd->kgd, mqd_mem_obj->gtt_mem);
kfd                78 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd,
kfd                83 drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c 	if (kfd_gtt_sa_allocate(kfd, sizeof(struct vi_mqd),
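
Each allocate_mqd() implementation above either carves the MQD out of the per-device GTT sub-allocator (kfd_gtt_sa_allocate(), initialised by kfd_gtt_sa_init() in kfd_device.c) or, when CWSR is enabled, allocates dedicated GTT memory. A reduced sketch of the sub-allocator path; the final argument of kfd_gtt_sa_allocate() is truncated in the listing and is assumed here to be a struct kfd_mem_obj ** out-parameter:

	#include "kfd_priv.h"	/* kfd_gtt_sa_allocate(), kfd_gtt_sa_free() */

	static struct kfd_mem_obj *allocate_mqd_sketch(struct kfd_dev *kfd,
						       unsigned int mqd_size)
	{
		struct kfd_mem_obj *mqd_mem_obj;

		/* Assumed signature: kfd_gtt_sa_allocate(kfd, size, &mem_obj). */
		if (kfd_gtt_sa_allocate(kfd, mqd_size, &mqd_mem_obj))
			return NULL;

		return mqd_mem_obj;
	}

	static void free_mqd_sketch(struct kfd_dev *kfd,
				    struct kfd_mem_obj *mqd_mem_obj)
	{
		kfd_gtt_sa_free(kfd, mqd_mem_obj);	/* signature shown in kfd_priv.h above */
	}
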
kfd               809 drivers/gpu/drm/amd/amdkfd/kfd_priv.h size_t kfd_doorbell_process_slice(struct kfd_dev *kfd);
kfd               810 drivers/gpu/drm/amd/amdkfd/kfd_priv.h int kfd_doorbell_init(struct kfd_dev *kfd);
kfd               811 drivers/gpu/drm/amd/amdkfd/kfd_priv.h void kfd_doorbell_fini(struct kfd_dev *kfd);
kfd               814 drivers/gpu/drm/amd/amdkfd/kfd_priv.h void __iomem *kfd_get_kernel_doorbell(struct kfd_dev *kfd,
kfd               816 drivers/gpu/drm/amd/amdkfd/kfd_priv.h void kfd_release_kernel_doorbell(struct kfd_dev *kfd, u32 __iomem *db_addr);
kfd               820 drivers/gpu/drm/amd/amdkfd/kfd_priv.h unsigned int kfd_doorbell_id_to_offset(struct kfd_dev *kfd,
kfd               830 drivers/gpu/drm/amd/amdkfd/kfd_priv.h int kfd_gtt_sa_allocate(struct kfd_dev *kfd, unsigned int size,
kfd               833 drivers/gpu/drm/amd/amdkfd/kfd_priv.h int kfd_gtt_sa_free(struct kfd_dev *kfd, struct kfd_mem_obj *mem_obj);
kfd               858 drivers/gpu/drm/amd/amdkfd/kfd_priv.h bool enqueue_ih_ring_entry(struct kfd_dev *kfd,	const void *ih_ring_entry);
kfd               997 drivers/gpu/drm/amd/amdkfd/kfd_priv.h uint64_t kfd_get_number_elems(struct kfd_dev *kfd);
kfd               223 samples/bpf/task_fd_query_user.c 	int err, res, kfd, efd;
kfd               228 samples/bpf/task_fd_query_user.c 	kfd = open(buf, O_WRONLY | O_APPEND, 0);
kfd               229 samples/bpf/task_fd_query_user.c 	CHECK_PERROR_RET(kfd < 0);
kfd               238 samples/bpf/task_fd_query_user.c 	CHECK_PERROR_RET(write(kfd, buf, strlen(buf)) < 0);
kfd               240 samples/bpf/task_fd_query_user.c 	close(kfd);
kfd               241 samples/bpf/task_fd_query_user.c 	kfd = -1;
kfd               257 samples/bpf/task_fd_query_user.c 	kfd = sys_perf_event_open(&attr, -1, 0, -1, PERF_FLAG_FD_CLOEXEC);
kfd               258 samples/bpf/task_fd_query_user.c 	CHECK_PERROR_RET(kfd < 0);
kfd               259 samples/bpf/task_fd_query_user.c 	CHECK_PERROR_RET(ioctl(kfd, PERF_EVENT_IOC_SET_BPF, prog_fd[0]) < 0);
kfd               260 samples/bpf/task_fd_query_user.c 	CHECK_PERROR_RET(ioctl(kfd, PERF_EVENT_IOC_ENABLE, 0) < 0);
kfd               263 samples/bpf/task_fd_query_user.c 	err = bpf_task_fd_query(getpid(), kfd, 0, buf, &len,
kfd               287 samples/bpf/task_fd_query_user.c 	close(kfd);
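
In samples/bpf/task_fd_query_user.c the variable kfd is an ordinary file descriptor (first the kprobe_events control file, later a perf event fd) and is unrelated to the AMD KFD driver above. A hedged user-space sketch of the pattern those lines follow, with an illustrative probe name and reduced error handling; the tracepoint id would normally be read from the event's id file under the tracing directory:

	#include <fcntl.h>
	#include <string.h>
	#include <unistd.h>
	#include <sys/ioctl.h>
	#include <sys/syscall.h>
	#include <linux/perf_event.h>

	/* Register a kprobe by appending to kprobe_events (illustrative symbol). */
	static int add_kprobe_sketch(void)
	{
		const char *cmd = "p:kprobes/p_do_sys_open do_sys_open\n";
		int kfd = open("/sys/kernel/debug/tracing/kprobe_events",
			       O_WRONLY | O_APPEND, 0);

		if (kfd < 0)
			return -1;
		if (write(kfd, cmd, strlen(cmd)) < 0) {
			close(kfd);
			return -1;
		}
		close(kfd);
		return 0;
	}

	/* Open a perf event for that kprobe and attach an already-loaded BPF program. */
	static int attach_bpf_sketch(int tracepoint_id, int prog_fd)
	{
		struct perf_event_attr attr = {
			.type = PERF_TYPE_TRACEPOINT,
			.size = sizeof(attr),
			.config = tracepoint_id,	/* from .../events/kprobes/.../id */
		};
		int kfd = syscall(__NR_perf_event_open, &attr, -1 /* pid */,
				  0 /* cpu */, -1 /* group_fd */,
				  PERF_FLAG_FD_CLOEXEC);

		if (kfd < 0)
			return -1;
		if (ioctl(kfd, PERF_EVENT_IOC_SET_BPF, prog_fd) < 0 ||
		    ioctl(kfd, PERF_EVENT_IOC_ENABLE, 0) < 0) {
			close(kfd);
			return -1;
		}
		return kfd;	/* caller closes it when done, as the sample does */
	}
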
kfd               419 tools/perf/builtin-probe.c 	int ret, ret2, ufd = -1, kfd = -1;
kfd               433 tools/perf/builtin-probe.c 	ret = probe_file__open_both(&kfd, &ufd, PF_FL_RW);
kfd               444 tools/perf/builtin-probe.c 	ret = probe_file__get_events(kfd, filter, klist);
kfd               449 tools/perf/builtin-probe.c 		ret = probe_file__del_strlist(kfd, klist);
kfd               470 tools/perf/builtin-probe.c 	if (kfd >= 0)
kfd               471 tools/perf/builtin-probe.c 		close(kfd);
kfd              3467 tools/perf/util/probe-event.c 	int ret, ret2, ufd = -1, kfd = -1;
kfd              3474 tools/perf/util/probe-event.c 	ret = probe_file__open_both(&kfd, &ufd, PF_FL_RW);
kfd              3478 tools/perf/util/probe-event.c 	ret = probe_file__del_events(kfd, filter);
kfd              3490 tools/perf/util/probe-event.c 	if (kfd >= 0)
kfd              3491 tools/perf/util/probe-event.c 		close(kfd);
kfd               117 tools/perf/util/probe-file.c int probe_file__open_both(int *kfd, int *ufd, int flag)
kfd               119 tools/perf/util/probe-file.c 	if (!kfd || !ufd)
kfd               122 tools/perf/util/probe-file.c 	*kfd = open_kprobe_events(flag & PF_FL_RW);
kfd               124 tools/perf/util/probe-file.c 	if (*kfd < 0 && *ufd < 0) {
kfd               125 tools/perf/util/probe-file.c 		print_both_open_warning(*kfd, *ufd);
kfd               126 tools/perf/util/probe-file.c 		return *kfd;
kfd                42 tools/perf/util/probe-file.h int probe_file__open_both(int *kfd, int *ufd, int flag);