rmn  66 drivers/gpu/drm/radeon/radeon_mn.c  struct radeon_mn *rmn = container_of(mn, struct radeon_mn, mn);
rmn  79 drivers/gpu/drm/radeon/radeon_mn.c  mutex_lock(&rmn->lock);
rmn  80 drivers/gpu/drm/radeon/radeon_mn.c  else if (!mutex_trylock(&rmn->lock))
rmn  83 drivers/gpu/drm/radeon/radeon_mn.c  it = interval_tree_iter_first(&rmn->objects, range->start, end);
rmn 123 drivers/gpu/drm/radeon/radeon_mn.c  mutex_unlock(&rmn->lock);
rmn 143 drivers/gpu/drm/radeon/radeon_mn.c  struct radeon_mn *rmn;
rmn 145 drivers/gpu/drm/radeon/radeon_mn.c  rmn = kzalloc(sizeof(*rmn), GFP_KERNEL);
rmn 146 drivers/gpu/drm/radeon/radeon_mn.c  if (!rmn)
rmn 149 drivers/gpu/drm/radeon/radeon_mn.c  mutex_init(&rmn->lock);
rmn 150 drivers/gpu/drm/radeon/radeon_mn.c  rmn->objects = RB_ROOT_CACHED;
rmn 151 drivers/gpu/drm/radeon/radeon_mn.c  return &rmn->mn;
rmn 179 drivers/gpu/drm/radeon/radeon_mn.c  struct radeon_mn *rmn;
rmn 187 drivers/gpu/drm/radeon/radeon_mn.c  rmn = container_of(mn, struct radeon_mn, mn);
rmn 191 drivers/gpu/drm/radeon/radeon_mn.c  mutex_lock(&rmn->lock);
rmn 193 drivers/gpu/drm/radeon/radeon_mn.c  while ((it = interval_tree_iter_first(&rmn->objects, addr, end))) {
rmn 196 drivers/gpu/drm/radeon/radeon_mn.c  interval_tree_remove(&node->it, &rmn->objects);
rmn 205 drivers/gpu/drm/radeon/radeon_mn.c  mutex_unlock(&rmn->lock);
rmn 210 drivers/gpu/drm/radeon/radeon_mn.c  bo->mn = rmn;
rmn 218 drivers/gpu/drm/radeon/radeon_mn.c  interval_tree_insert(&node->it, &rmn->objects);
rmn 220 drivers/gpu/drm/radeon/radeon_mn.c  mutex_unlock(&rmn->lock);
rmn 234 drivers/gpu/drm/radeon/radeon_mn.c  struct radeon_mn *rmn = bo->mn;
rmn 237 drivers/gpu/drm/radeon/radeon_mn.c  if (!rmn)
rmn 240 drivers/gpu/drm/radeon/radeon_mn.c  mutex_lock(&rmn->lock);
rmn 249 drivers/gpu/drm/radeon/radeon_mn.c  interval_tree_remove(&node->it, &rmn->objects);
rmn 253 drivers/gpu/drm/radeon/radeon_mn.c  mutex_unlock(&rmn->lock);
rmn 255 drivers/gpu/drm/radeon/radeon_mn.c  mmu_notifier_put(&rmn->mn);
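Taken together, these hits outline the bookkeeping that radeon_mn.c keeps per MMU notifier: a struct radeon_mn embedding a struct mmu_notifier (recovered via container_of()), a mutex guarding an interval tree of registered buffer-object ranges, and a final mmu_notifier_put() when the last object is unregistered. The sketch below is a reconstruction from the lines above only, not a verbatim copy of the file; the radeon_mn_node layout and the helper name radeon_mn_alloc_notifier_sketch are assumptions for illustration.

/*
 * Minimal sketch of the structures and allocation pattern implied by the
 * listing above (assumed reconstruction, not the full radeon_mn.c).
 */
#include <linux/err.h>
#include <linux/interval_tree.h>
#include <linux/mmu_notifier.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct radeon_mn {
	struct mmu_notifier	mn;		/* embedded; callbacks use container_of() (lines 66, 187) */
	struct mutex		lock;		/* serializes interval-tree access (lines 79-123, 191-220, 240-253) */
	struct rb_root_cached	objects;	/* interval tree of tracked BO ranges */
};

/* Per-BO node in the interval tree; exact layout is an assumption. */
struct radeon_mn_node {
	struct interval_tree_node	it;	/* inserted/removed at lines 196, 218, 249 */
};

/* Allocation pattern matching lines 143-151 of the listing. */
static struct mmu_notifier *radeon_mn_alloc_notifier_sketch(void)
{
	struct radeon_mn *rmn;

	rmn = kzalloc(sizeof(*rmn), GFP_KERNEL);
	if (!rmn)
		return ERR_PTR(-ENOMEM);

	mutex_init(&rmn->lock);
	rmn->objects = RB_ROOT_CACHED;
	return &rmn->mn;
}

The invalidation and unregister paths visible in the listing follow the same discipline: take rmn->lock (or mutex_trylock() in the non-blocking case at line 80), walk the tree with interval_tree_iter_first(), insert or remove nodes, drop the lock, and release the notifier with mmu_notifier_put() once the last BO is gone.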