amn                75 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn = container_of(work, struct amdgpu_mn, work);
amn                76 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_device *adev = amn->adev;
amn                81 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	down_write(&amn->lock);
amn                82 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	hash_del(&amn->node);
amn                84 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 					     &amn->objects.rb_root, it.rb) {
amn                91 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	up_write(&amn->lock);
amn                94 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	hmm_mirror_unregister(&amn->mirror);
amn                95 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	kfree(amn);
amn               107 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn = container_of(mirror, struct amdgpu_mn, mirror);
amn               109 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	INIT_WORK(&amn->work, amdgpu_mn_destroy);
amn               110 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	schedule_work(&amn->work);
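
The lines above (75-95 and 107-110) form the deferred teardown path: the mirror release path recovers the enclosing struct amdgpu_mn from its embedded hmm_mirror with container_of() and schedules amdgpu_mn_destroy() as a work item, and the destroy work then pulls the same object back out of its embedded work member, unhashes it, walks amn->objects under the write lock, unregisters the HMM mirror and frees it. Below is a minimal userspace sketch of the container_of pattern both callbacks rely on; the struct names and the release hook are illustrative, not the driver's.

/* Sketch of container_of: a callback is handed a pointer to an embedded
 * member and recovers the enclosing structure from it. */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct mirror {
	void (*release)(struct mirror *m);
};

struct notifier {
	int id;
	struct mirror mirror;		/* embedded member, like amn->mirror */
};

static void notifier_release(struct mirror *m)
{
	/* Recover the enclosing notifier from the embedded mirror pointer. */
	struct notifier *n = container_of(m, struct notifier, mirror);

	printf("releasing notifier %d\n", n->id);
}

int main(void)
{
	struct notifier n = {
		.id = 42,
		.mirror = { .release = notifier_release },
	};

	n.mirror.release(&n.mirror);
	return 0;
}
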
amn               140 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c static int amdgpu_mn_read_lock(struct amdgpu_mn *amn, bool blockable)
amn               143 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		down_read(&amn->lock);
amn               144 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	else if (!down_read_trylock(&amn->lock))
amn               155 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c static void amdgpu_mn_read_unlock(struct amdgpu_mn *amn)
amn               157 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	up_read(&amn->lock);
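
amdgpu_mn_read_lock() (lines 140-144) takes amn->lock for reading, but when the caller is not allowed to block it only try-locks and reports -EAGAIN; amdgpu_mn_read_unlock() (lines 155-157) drops the lock again. A small userspace sketch of that blockable/try-lock split follows, with a pthread rwlock standing in for the kernel rw_semaphore; purely illustrative.

/* Sleep on the lock when blocking is allowed, otherwise try once and
 * report -EAGAIN so the caller can retry later. */
#include <errno.h>
#include <pthread.h>
#include <stdbool.h>

static pthread_rwlock_t lock = PTHREAD_RWLOCK_INITIALIZER;

static int read_lock(bool blockable)
{
	if (blockable)
		return pthread_rwlock_rdlock(&lock) ? -EAGAIN : 0;

	/* Caller may not sleep: a failed try-lock is reported, not waited on. */
	return pthread_rwlock_tryrdlock(&lock) ? -EAGAIN : 0;
}

static void read_unlock(void)
{
	pthread_rwlock_unlock(&lock);
}

int main(void)
{
	if (read_lock(false) == 0) {	/* non-blocking attempt */
		/* ... walk the tracked ranges under the lock ... */
		read_unlock();
	}
	return 0;
}
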
amn               202 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn = container_of(mirror, struct amdgpu_mn, mirror);
amn               214 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	if (amdgpu_mn_read_lock(amn, blockable))
amn               217 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	it = interval_tree_iter_first(&amn->objects, start, end);
amn               222 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			amdgpu_mn_read_unlock(amn);
amn               232 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amdgpu_mn_read_unlock(amn);
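
The GFX sync_pagetables callback (lines 202-232) bails out if the read lock cannot be taken, then visits every node in amn->objects overlapping the invalidated range and invalidates it, dropping the lock and giving up early when the caller may not block. The sketch below shows that walk over a plain array standing in for the rbtree-backed interval tree; the read lock is left out for brevity and the names are illustrative.

/* Visit every tracked range overlapping [start, last], bailing out early
 * when the caller is not allowed to block. */
#include <errno.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct range {
	unsigned long start, last;	/* inclusive bounds, like interval tree nodes */
};

static struct range objects[] = {
	{ 0x1000, 0x1fff },
	{ 0x4000, 0x7fff },
};

static void invalidate_node(struct range *r)
{
	printf("invalidate [%#lx, %#lx]\n", r->start, r->last);
}

static int sync_pagetables(unsigned long start, unsigned long last, bool blockable)
{
	size_t i;

	for (i = 0; i < sizeof(objects) / sizeof(objects[0]); i++) {
		if (objects[i].last < start || objects[i].start > last)
			continue;		/* no overlap with this range */
		if (!blockable)
			return -EAGAIN;		/* caller may not sleep: give up */
		invalidate_node(&objects[i]);
	}
	return 0;
}

int main(void)
{
	return sync_pagetables(0x1800, 0x4fff, true);
}
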
amn               251 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn = container_of(mirror, struct amdgpu_mn, mirror);
amn               260 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	if (amdgpu_mn_read_lock(amn, blockable))
amn               263 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	it = interval_tree_iter_first(&amn->objects, start, end);
amn               269 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			amdgpu_mn_read_unlock(amn);
amn               281 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 				amdgpu_amdkfd_evict_userptr(mem, amn->mm);
amn               285 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amdgpu_mn_read_unlock(amn);
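
The HSA variant (lines 251-285) walks the same interval tree, but for each overlapping node it iterates the attached buffer objects and hands the affected userptr mappings to amdgpu_amdkfd_evict_userptr() for amn->mm. The sketch below shows only that per-object shape; the overlap test, list layout and eviction hook are stand-ins, not the driver's.

/* For every buffer attached to an overlapping node, call an eviction hook. */
#include <stdio.h>

struct bo {
	unsigned long start, last;
	struct bo *next;
};

struct node {
	unsigned long start, last;
	struct bo *bos;			/* buffer objects sharing this range */
};

static void evict_userptr(struct bo *bo)
{
	printf("evict userptr bo [%#lx, %#lx]\n", bo->start, bo->last);
}

static void invalidate_node_hsa(struct node *node, unsigned long start,
				unsigned long last)
{
	struct bo *bo;

	for (bo = node->bos; bo; bo = bo->next) {
		if (bo->last < start || bo->start > last)
			continue;		/* this bo is not affected */
		evict_userptr(bo);		/* hand the mapping back for eviction */
	}
}

int main(void)
{
	struct bo b = { 0x4000, 0x4fff, NULL };
	struct node n = { 0x4000, 0x7fff, &b };

	invalidate_node_hsa(&n, 0x1800, 0x4fff);
	return 0;
}
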
amn               319 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn;
amn               329 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	hash_for_each_possible(adev->mn_hash, amn, node, key)
amn               330 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		if (AMDGPU_MN_KEY(amn->mm, amn->type) == key)
amn               333 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn = kzalloc(sizeof(*amn), GFP_KERNEL);
amn               334 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	if (!amn) {
amn               335 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		amn = ERR_PTR(-ENOMEM);
amn               339 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn->adev = adev;
amn               340 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn->mm = mm;
amn               341 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	init_rwsem(&amn->lock);
amn               342 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn->type = type;
amn               343 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn->objects = RB_ROOT_CACHED;
amn               345 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn->mirror.ops = &amdgpu_hmm_mirror_ops[type];
amn               346 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	r = hmm_mirror_register(&amn->mirror, mm);
amn               350 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	hash_add(adev->mn_hash, &amn->node, AMDGPU_MN_KEY(mm, type));
amn               356 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	return amn;
amn               361 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	kfree(amn);
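
amdgpu_mn_get() (lines 319-361) hashes on AMDGPU_MN_KEY(mm, type), returns an existing struct amdgpu_mn when one already covers this mm and type, and otherwise allocates one, initialises its lock, type and empty interval tree, registers the HMM mirror and adds it to adev->mn_hash; allocation failure yields ERR_PTR(-ENOMEM) and a failed mirror registration frees the new object again. A userspace sketch of that lookup-or-create shape follows; a single linked list stands in for the hash table, locking and mirror registration are omitted, and the names are illustrative.

/* Reuse an existing notifier keyed by (mm, type), or create one. */
#include <stdlib.h>

enum mn_type { MN_TYPE_GFX, MN_TYPE_HSA };

struct mn {
	void *mm;			/* address space this notifier tracks */
	enum mn_type type;
	struct mn *next;
};

static struct mn *mn_list;

static struct mn *mn_get(void *mm, enum mn_type type)
{
	struct mn *mn;

	for (mn = mn_list; mn; mn = mn->next)
		if (mn->mm == mm && mn->type == type)
			return mn;	/* reuse the notifier for this mm/type */

	mn = calloc(1, sizeof(*mn));
	if (!mn)
		return NULL;		/* the driver returns ERR_PTR(-ENOMEM) here */

	mn->mm = mm;
	mn->type = type;
	mn->next = mn_list;
	mn_list = mn;
	return mn;
}

int main(void)
{
	int dummy_mm;
	struct mn *a = mn_get(&dummy_mm, MN_TYPE_GFX);
	struct mn *b = mn_get(&dummy_mm, MN_TYPE_GFX);

	return (a && a == b) ? 0 : 1;	/* second call reuses the first notifier */
}
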
amn               381 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn;
amn               386 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn = amdgpu_mn_get(adev, type);
amn               387 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	if (IS_ERR(amn))
amn               388 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		return PTR_ERR(amn);
amn               396 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	down_write(&amn->lock);
amn               398 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	while ((it = interval_tree_iter_first(&amn->objects, addr, end))) {
amn               401 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		interval_tree_remove(&node->it, &amn->objects);
amn               412 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	bo->mn = amn;
amn               420 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	interval_tree_insert(&node->it, &amn->objects);
amn               422 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	up_write(&amn->lock);
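
amdgpu_mn_register() (lines 381-422) takes amn->lock for writing and, before inserting a node for the new address range, repeatedly removes every node already overlapping it and grows the new range over them, so a single node ends up covering the merged range; the BO then points back at the notifier via bo->mn. The sketch below reproduces just that merge-then-insert step over a linked list standing in for the interval tree; per-node BO lists and locking are omitted and the names are illustrative.

/* Before tracking a new range, remove every existing node it overlaps and
 * widen the new range over them, so one node covers the union. */
#include <stdio.h>
#include <stdlib.h>

struct node {
	unsigned long start, last;	/* inclusive range, like it.start/it.last */
	struct node *next;
};

static struct node *objects;

static void track_range(unsigned long start, unsigned long last)
{
	struct node **pp, *n;
	int merged;

	/* Rescan after every merge so nodes the widened range now touches are
	 * picked up too, mirroring the repeated interval tree query. */
	do {
		merged = 0;
		for (pp = &objects; (n = *pp); pp = &n->next) {
			if (n->last < start || n->start > last)
				continue;		/* no overlap */
			if (n->start < start)
				start = n->start;
			if (n->last > last)
				last = n->last;
			*pp = n->next;			/* remove the old node */
			free(n);
			merged = 1;
			break;
		}
	} while (merged);

	n = malloc(sizeof(*n));
	if (!n)
		return;
	n->start = start;
	n->last = last;
	n->next = objects;
	objects = n;
}

int main(void)
{
	track_range(0x1000, 0x1fff);
	track_range(0x3000, 0x3fff);
	track_range(0x1800, 0x34ff);	/* merges both into one node */

	printf("[%#lx, %#lx]\n", objects->start, objects->last);
	return 0;
}
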
amn               437 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	struct amdgpu_mn *amn;
amn               442 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	amn = bo->mn;
amn               443 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	if (amn == NULL) {
amn               448 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	down_write(&amn->lock);
amn               460 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 		interval_tree_remove(&node->it, &amn->objects);
amn               464 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 	up_write(&amn->lock);
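
amdgpu_mn_unregister() (lines 437-464) returns immediately if bo->mn was never set, otherwise takes amn->lock for writing, unlinks the BO and, once no buffers remain on the node, removes the node from amn->objects and frees it. A userspace sketch of that "drop the node with its last buffer" logic follows; a single global node stands in for the interval tree, locking is omitted and the names are illustrative.

/* Unlink the buffer from its node and free the node once it is empty. */
#include <stdio.h>
#include <stdlib.h>

struct bo;

struct node {
	struct bo *bos;			/* buffers registered on this range */
};

struct bo {
	struct node *mn;		/* set at register time, like bo->mn */
	struct bo *next;
};

static struct node *objects;		/* stand-in for amn->objects */

static void unregister_bo(struct bo *bo)
{
	struct node *node = bo->mn;
	struct bo **pp;

	if (!node)
		return;				/* never registered: nothing to do */

	for (pp = &node->bos; *pp; pp = &(*pp)->next)
		if (*pp == bo) {
			*pp = bo->next;		/* unlink this buffer */
			break;
		}
	bo->mn = NULL;

	if (!node->bos) {			/* last buffer gone: drop the node */
		objects = NULL;			/* stands in for interval_tree_remove() */
		free(node);
	}
}

int main(void)
{
	struct bo b = { .mn = NULL, .next = NULL };

	objects = calloc(1, sizeof(*objects));
	if (!objects)
		return 1;

	objects->bos = &b;
	b.mn = objects;

	unregister_bo(&b);
	printf("tree empty: %s\n", objects ? "no" : "yes");
	return 0;
}
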