Lines matching refs:bo (cross-reference hits for the identifier 'bo' in the radeon buffer-object code, radeon_object.c)

43 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
50 static void radeon_update_memory_usage(struct radeon_bo *bo, in radeon_update_memory_usage() argument
53 struct radeon_device *rdev = bo->rdev; in radeon_update_memory_usage()
54 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT; in radeon_update_memory_usage()
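
From the two fragments above, the accounting helper can be reconstructed roughly as below. The atomic64 counter names (gtt_usage, vram_usage) are filled in from memory of this driver vintage and should be treated as assumptions:

static void radeon_update_memory_usage(struct radeon_bo *bo,
				       unsigned mem_type, int sign)
{
	struct radeon_device *rdev = bo->rdev;
	u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;

	switch (mem_type) {
	case TTM_PL_TT:
		if (sign > 0)
			atomic64_add(size, &rdev->gtt_usage);	/* assumed field */
		else
			atomic64_sub(size, &rdev->gtt_usage);
		break;
	case TTM_PL_VRAM:
		if (sign > 0)
			atomic64_add(size, &rdev->vram_usage);	/* assumed field */
		else
			atomic64_sub(size, &rdev->vram_usage);
		break;
	}
}
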
74 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
76 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
78 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1); in radeon_ttm_bo_destroy()
80 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
81 list_del_init(&bo->list); in radeon_ttm_bo_destroy()
82 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
83 radeon_bo_clear_surface_reg(bo); in radeon_ttm_bo_destroy()
84 WARN_ON(!list_empty(&bo->va)); in radeon_ttm_bo_destroy()
85 drm_gem_object_release(&bo->gem_base); in radeon_ttm_bo_destroy()
86 kfree(bo); in radeon_ttm_bo_destroy()
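
The destructor fragments above join into roughly this body (a reconstruction assembled from the listed lines; only trivial glue is added):

static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)
{
	struct radeon_bo *bo;

	bo = container_of(tbo, struct radeon_bo, tbo);

	/* Drop this BO's contribution to the per-domain usage counters. */
	radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);

	mutex_lock(&bo->rdev->gem.mutex);
	list_del_init(&bo->list);
	mutex_unlock(&bo->rdev->gem.mutex);
	radeon_bo_clear_surface_reg(bo);
	/* All VM mappings should be gone before the BO is freed. */
	WARN_ON(!list_empty(&bo->va));
	drm_gem_object_release(&bo->gem_base);
	kfree(bo);
}
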
89 bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo) in radeon_ttm_bo_is_radeon_bo() argument
91 if (bo->destroy == &radeon_ttm_bo_destroy) in radeon_ttm_bo_is_radeon_bo()
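
The full predicate is only two lines longer than what the index shows; it identifies radeon BOs by their TTM destroy callback:

bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo)
{
	/* A TTM object is ours iff it is torn down by our destructor. */
	if (bo->destroy == &radeon_ttm_bo_destroy)
		return true;
	return false;
}
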
184 struct radeon_bo *bo; in radeon_bo_create() local
204 bo = kzalloc(sizeof(struct radeon_bo), GFP_KERNEL); in radeon_bo_create()
205 if (bo == NULL) in radeon_bo_create()
207 r = drm_gem_object_init(rdev->ddev, &bo->gem_base, size); in radeon_bo_create()
209 kfree(bo); in radeon_bo_create()
212 bo->rdev = rdev; in radeon_bo_create()
213 bo->surface_reg = -1; in radeon_bo_create()
214 INIT_LIST_HEAD(&bo->list); in radeon_bo_create()
215 INIT_LIST_HEAD(&bo->va); in radeon_bo_create()
216 bo->initial_domain = domain & (RADEON_GEM_DOMAIN_VRAM | in radeon_bo_create()
220 bo->flags = flags; in radeon_bo_create()
223 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
229 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
241 bo->flags &= ~(RADEON_GEM_GTT_WC | RADEON_GEM_GTT_UC); in radeon_bo_create()
247 bo->flags &= ~RADEON_GEM_GTT_WC; in radeon_bo_create()
250 radeon_ttm_placement_from_domain(bo, domain); in radeon_bo_create()
253 r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type, in radeon_bo_create()
254 &bo->placement, page_align, !kernel, NULL, in radeon_bo_create()
260 *bo_ptr = bo; in radeon_bo_create()
262 trace_radeon_bo_create(bo); in radeon_bo_create()
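
A hypothetical caller, to show the create path end to end. The exact parameter list of radeon_bo_create changed across kernel versions, so the trailing sg/resv arguments here are assumptions matching the era of this listing:

	/* Allocate one page of kernel-owned VRAM (illustrative values). */
	struct radeon_bo *vram_bo = NULL;
	int r;

	r = radeon_bo_create(rdev, PAGE_SIZE, PAGE_SIZE, true,
			     RADEON_GEM_DOMAIN_VRAM, 0,
			     NULL, NULL, &vram_bo);	/* sg, resv: assumed */
	if (r)
		return r;

Note the repeated flag clearing at lines 223/229/241/247 above: the create path strips RADEON_GEM_GTT_WC/UC whenever write-combined GTT mappings cannot work on the platform, falling back to cached pages.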
267 int radeon_bo_kmap(struct radeon_bo *bo, void **ptr) in radeon_bo_kmap() argument
272 if (bo->kptr) { in radeon_bo_kmap()
274 *ptr = bo->kptr; in radeon_bo_kmap()
278 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in radeon_bo_kmap()
282 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in radeon_bo_kmap()
284 *ptr = bo->kptr; in radeon_bo_kmap()
286 radeon_bo_check_tiling(bo, 0, 0); in radeon_bo_kmap()
290 void radeon_bo_kunmap(struct radeon_bo *bo) in radeon_bo_kunmap() argument
292 if (bo->kptr == NULL) in radeon_bo_kunmap()
294 bo->kptr = NULL; in radeon_bo_kunmap()
295 radeon_bo_check_tiling(bo, 0, 0); in radeon_bo_kunmap()
296 ttm_bo_kunmap(&bo->kmap); in radeon_bo_kunmap()
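
A typical pairing of the two calls above, as a minimal usage sketch (the payload and error handling are illustrative):

	void *cpu_ptr;
	int r;

	r = radeon_bo_kmap(bo, &cpu_ptr);	/* maps the whole BO */
	if (r)
		return r;
	memset(cpu_ptr, 0, radeon_bo_size(bo));	/* illustrative CPU access */
	radeon_bo_kunmap(bo);

Note from the fragments at lines 272-274: a repeat kmap returns the cached bo->kptr rather than remapping.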
299 struct radeon_bo *radeon_bo_ref(struct radeon_bo *bo) in radeon_bo_ref() argument
301 if (bo == NULL) in radeon_bo_ref()
304 ttm_bo_reference(&bo->tbo); in radeon_bo_ref()
305 return bo; in radeon_bo_ref()
308 void radeon_bo_unref(struct radeon_bo **bo) in radeon_bo_unref() argument
313 if ((*bo) == NULL) in radeon_bo_unref()
315 rdev = (*bo)->rdev; in radeon_bo_unref()
316 tbo = &((*bo)->tbo); in radeon_bo_unref()
319 *bo = NULL; in radeon_bo_unref()
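
Reconstructed unref. The hand-off through a local tbo pointer matters because ttm_bo_unref() clears the pointer it is given; the caller's handle is then cleared to match:

void radeon_bo_unref(struct radeon_bo **bo)
{
	struct ttm_buffer_object *tbo;
	struct radeon_device *rdev;

	if ((*bo) == NULL)
		return;
	rdev = (*bo)->rdev;
	tbo = &((*bo)->tbo);
	ttm_bo_unref(&tbo);	/* drops the TTM reference, NULLs tbo */
	if (tbo == NULL)
		*bo = NULL;
}
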
322 int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, in radeon_bo_pin_restricted() argument
327 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm)) in radeon_bo_pin_restricted()
330 if (bo->pin_count) { in radeon_bo_pin_restricted()
331 bo->pin_count++; in radeon_bo_pin_restricted()
333 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted()
339 domain_start = bo->rdev->mc.vram_start; in radeon_bo_pin_restricted()
341 domain_start = bo->rdev->mc.gtt_start; in radeon_bo_pin_restricted()
343 (radeon_bo_gpu_offset(bo) - domain_start)); in radeon_bo_pin_restricted()
348 radeon_ttm_placement_from_domain(bo, domain); in radeon_bo_pin_restricted()
349 for (i = 0; i < bo->placement.num_placement; i++) { in radeon_bo_pin_restricted()
351 if ((bo->placements[i].flags & TTM_PL_FLAG_VRAM) && in radeon_bo_pin_restricted()
352 !(bo->flags & RADEON_GEM_NO_CPU_ACCESS) && in radeon_bo_pin_restricted()
353 (!max_offset || max_offset > bo->rdev->mc.visible_vram_size)) in radeon_bo_pin_restricted()
354 bo->placements[i].lpfn = in radeon_bo_pin_restricted()
355 bo->rdev->mc.visible_vram_size >> PAGE_SHIFT; in radeon_bo_pin_restricted()
357 bo->placements[i].lpfn = max_offset >> PAGE_SHIFT; in radeon_bo_pin_restricted()
359 bo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT; in radeon_bo_pin_restricted()
362 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in radeon_bo_pin_restricted()
364 bo->pin_count = 1; in radeon_bo_pin_restricted()
366 *gpu_addr = radeon_bo_gpu_offset(bo); in radeon_bo_pin_restricted()
368 bo->rdev->vram_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
370 bo->rdev->gart_pin_size += radeon_bo_size(bo); in radeon_bo_pin_restricted()
372 dev_err(bo->rdev->dev, "%p pin failed\n", bo); in radeon_bo_pin_restricted()
377 int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) in radeon_bo_pin() argument
379 return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr); in radeon_bo_pin()
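
Pinning requires the BO to be reserved, so a typical caller looks like this sketch (radeon_bo_reserve/unreserve appear further down in this index, at lines 707/712):

	u64 gpu_addr;
	int r;

	r = radeon_bo_reserve(bo, false);
	if (unlikely(r != 0))
		return r;
	/* max_offset == 0 via radeon_bo_pin(): no placement restriction. */
	r = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
	radeon_bo_unreserve(bo);
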
382 int radeon_bo_unpin(struct radeon_bo *bo) in radeon_bo_unpin() argument
386 if (!bo->pin_count) { in radeon_bo_unpin()
387 dev_warn(bo->rdev->dev, "%p unpin not necessary\n", bo); in radeon_bo_unpin()
390 bo->pin_count--; in radeon_bo_unpin()
391 if (bo->pin_count) in radeon_bo_unpin()
393 for (i = 0; i < bo->placement.num_placement; i++) { in radeon_bo_unpin()
394 bo->placements[i].lpfn = 0; in radeon_bo_unpin()
395 bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT; in radeon_bo_unpin()
397 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in radeon_bo_unpin()
399 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) in radeon_bo_unpin()
400 bo->rdev->vram_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
402 bo->rdev->gart_pin_size -= radeon_bo_size(bo); in radeon_bo_unpin()
404 dev_err(bo->rdev->dev, "%p validate failed for unpin\n", bo); in radeon_bo_unpin()
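
The unpin fragments assemble into roughly the following. pin_count is a reference count, so only the last unpin drops TTM_PL_FLAG_NO_EVICT and revalidates:

int radeon_bo_unpin(struct radeon_bo *bo)
{
	int r, i;

	if (!bo->pin_count) {
		dev_warn(bo->rdev->dev, "%p unpin not necessary\n", bo);
		return 0;
	}
	bo->pin_count--;
	if (bo->pin_count)
		return 0;
	for (i = 0; i < bo->placement.num_placement; i++) {
		bo->placements[i].lpfn = 0;
		bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
	}
	r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);
	if (likely(r == 0)) {
		/* Undo the pin-size accounting done on the pin path. */
		if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
			bo->rdev->vram_pin_size -= radeon_bo_size(bo);
		else
			bo->rdev->gart_pin_size -= radeon_bo_size(bo);
	} else {
		dev_err(bo->rdev->dev, "%p validate failed for unpin\n", bo);
	}
	return r;
}
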
422 struct radeon_bo *bo, *n; in radeon_bo_force_delete() local
428 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { in radeon_bo_force_delete()
431 &bo->gem_base, bo, (unsigned long)bo->gem_base.size, in radeon_bo_force_delete()
432 *((unsigned long *)&bo->gem_base.refcount)); in radeon_bo_force_delete()
433 mutex_lock(&bo->rdev->gem.mutex); in radeon_bo_force_delete()
434 list_del_init(&bo->list); in radeon_bo_force_delete()
435 mutex_unlock(&bo->rdev->gem.mutex); in radeon_bo_force_delete()
437 drm_gem_object_unreference(&bo->gem_base); in radeon_bo_force_delete()
532 struct radeon_bo *bo = lobj->robj; in radeon_bo_list_validate() local
533 if (!bo->pin_count) { in radeon_bo_list_validate()
537 radeon_mem_type_to_domain(bo->tbo.mem.mem_type); in radeon_bo_list_validate()
555 radeon_ttm_placement_from_domain(bo, domain); in radeon_bo_list_validate()
557 radeon_uvd_force_into_uvd_segment(bo, allowed); in radeon_bo_list_validate()
560 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in radeon_bo_list_validate()
574 lobj->gpu_offset = radeon_bo_gpu_offset(bo); in radeon_bo_list_validate()
575 lobj->tiling_flags = bo->tiling_flags; in radeon_bo_list_validate()
586 int radeon_bo_get_surface_reg(struct radeon_bo *bo) in radeon_bo_get_surface_reg() argument
588 struct radeon_device *rdev = bo->rdev; in radeon_bo_get_surface_reg()
594 lockdep_assert_held(&bo->tbo.resv->lock.base); in radeon_bo_get_surface_reg()
596 if (!bo->tiling_flags) in radeon_bo_get_surface_reg()
599 if (bo->surface_reg >= 0) { in radeon_bo_get_surface_reg()
600 reg = &rdev->surface_regs[bo->surface_reg]; in radeon_bo_get_surface_reg()
601 i = bo->surface_reg; in radeon_bo_get_surface_reg()
609 if (!reg->bo) in radeon_bo_get_surface_reg()
612 old_object = reg->bo; in radeon_bo_get_surface_reg()
623 old_object = reg->bo; in radeon_bo_get_surface_reg()
631 bo->surface_reg = i; in radeon_bo_get_surface_reg()
632 reg->bo = bo; in radeon_bo_get_surface_reg()
635 radeon_set_surface_reg(rdev, i, bo->tiling_flags, bo->pitch, in radeon_bo_get_surface_reg()
636 bo->tbo.mem.start << PAGE_SHIFT, in radeon_bo_get_surface_reg()
637 bo->tbo.num_pages << PAGE_SHIFT); in radeon_bo_get_surface_reg()
641 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo) in radeon_bo_clear_surface_reg() argument
643 struct radeon_device *rdev = bo->rdev; in radeon_bo_clear_surface_reg()
646 if (bo->surface_reg == -1) in radeon_bo_clear_surface_reg()
649 reg = &rdev->surface_regs[bo->surface_reg]; in radeon_bo_clear_surface_reg()
650 radeon_clear_surface_reg(rdev, bo->surface_reg); in radeon_bo_clear_surface_reg()
652 reg->bo = NULL; in radeon_bo_clear_surface_reg()
653 bo->surface_reg = -1; in radeon_bo_clear_surface_reg()
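
Assembled from the fragments, the clear path is short; struct radeon_surface_reg pairs a hardware surface register with the BO that currently owns it:

static void radeon_bo_clear_surface_reg(struct radeon_bo *bo)
{
	struct radeon_device *rdev = bo->rdev;
	struct radeon_surface_reg *reg;

	if (bo->surface_reg == -1)
		return;

	reg = &rdev->surface_regs[bo->surface_reg];
	radeon_clear_surface_reg(rdev, bo->surface_reg);

	reg->bo = NULL;
	bo->surface_reg = -1;
}
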
656 int radeon_bo_set_tiling_flags(struct radeon_bo *bo, in radeon_bo_set_tiling_flags() argument
659 struct radeon_device *rdev = bo->rdev; in radeon_bo_set_tiling_flags()
707 r = radeon_bo_reserve(bo, false); in radeon_bo_set_tiling_flags()
710 bo->tiling_flags = tiling_flags; in radeon_bo_set_tiling_flags()
711 bo->pitch = pitch; in radeon_bo_set_tiling_flags()
712 radeon_bo_unreserve(bo); in radeon_bo_set_tiling_flags()
716 void radeon_bo_get_tiling_flags(struct radeon_bo *bo, in radeon_bo_get_tiling_flags() argument
720 lockdep_assert_held(&bo->tbo.resv->lock.base); in radeon_bo_get_tiling_flags()
723 *tiling_flags = bo->tiling_flags; in radeon_bo_get_tiling_flags()
725 *pitch = bo->pitch; in radeon_bo_get_tiling_flags()
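
Usage sketch for the tiling accessors. Note the asymmetry visible above: the setter reserves the BO itself (line 707), while the getter asserts that the caller already holds the reservation (line 720). Values are illustrative:

	uint32_t flags, pitch;
	int r;

	/* Setter takes the reservation internally. */
	r = radeon_bo_set_tiling_flags(bo, RADEON_TILING_MACRO, 1024);
	if (r)
		return r;

	/* Getter expects the reservation to be held by the caller. */
	r = radeon_bo_reserve(bo, false);
	if (unlikely(r != 0))
		return r;
	radeon_bo_get_tiling_flags(bo, &flags, &pitch);
	radeon_bo_unreserve(bo);
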
728 int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, in radeon_bo_check_tiling() argument
732 lockdep_assert_held(&bo->tbo.resv->lock.base); in radeon_bo_check_tiling()
734 if (!(bo->tiling_flags & RADEON_TILING_SURFACE)) in radeon_bo_check_tiling()
738 radeon_bo_clear_surface_reg(bo); in radeon_bo_check_tiling()
742 if (bo->tbo.mem.mem_type != TTM_PL_VRAM) { in radeon_bo_check_tiling()
746 if (bo->surface_reg >= 0) in radeon_bo_check_tiling()
747 radeon_bo_clear_surface_reg(bo); in radeon_bo_check_tiling()
751 if ((bo->surface_reg >= 0) && !has_moved) in radeon_bo_check_tiling()
754 return radeon_bo_get_surface_reg(bo); in radeon_bo_check_tiling()
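
Joining the fragments, the tiling check reads roughly as follows. It only manages a surface register for BOs with RADEON_TILING_SURFACE set, and only while they actually live in VRAM:

int radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved,
			   bool force_drop)
{
	if (!force_drop)
		lockdep_assert_held(&bo->tbo.resv->lock.base);

	if (!(bo->tiling_flags & RADEON_TILING_SURFACE))
		return 0;

	if (force_drop) {
		radeon_bo_clear_surface_reg(bo);
		return 0;
	}

	if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {
		if (!has_moved)
			return 0;
		/* Moved out of VRAM: the surface register is stale. */
		if (bo->surface_reg >= 0)
			radeon_bo_clear_surface_reg(bo);
		return 0;
	}

	if ((bo->surface_reg >= 0) && !has_moved)
		return 0;

	return radeon_bo_get_surface_reg(bo);
}
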
757 void radeon_bo_move_notify(struct ttm_buffer_object *bo, in radeon_bo_move_notify() argument
762 if (!radeon_ttm_bo_is_radeon_bo(bo)) in radeon_bo_move_notify()
765 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_bo_move_notify()
773 radeon_update_memory_usage(rbo, bo->mem.mem_type, -1); in radeon_bo_move_notify()
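
A reconstruction of the move notifier. The VM invalidation call and the matching +1 accounting for the new placement are filled in from memory of this driver and should be treated as assumptions:

void radeon_bo_move_notify(struct ttm_buffer_object *bo,
			   struct ttm_mem_reg *new_mem)
{
	struct radeon_bo *rbo;

	if (!radeon_ttm_bo_is_radeon_bo(bo))
		return;

	rbo = container_of(bo, struct radeon_bo, tbo);
	radeon_bo_check_tiling(rbo, 0, 1);
	radeon_vm_bo_invalidate(rbo->rdev, rbo);	/* assumed call */

	/* Update statistics: debit the old placement, credit the new. */
	if (!new_mem)
		return;

	radeon_update_memory_usage(rbo, bo->mem.mem_type, -1);
	radeon_update_memory_usage(rbo, new_mem->mem_type, 1);	/* assumed */
}
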
777 int radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) in radeon_bo_fault_reserve_notify() argument
784 if (!radeon_ttm_bo_is_radeon_bo(bo)) in radeon_bo_fault_reserve_notify()
786 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_bo_fault_reserve_notify()
789 if (bo->mem.mem_type != TTM_PL_VRAM) in radeon_bo_fault_reserve_notify()
792 size = bo->mem.num_pages << PAGE_SHIFT; in radeon_bo_fault_reserve_notify()
793 offset = bo->mem.start << PAGE_SHIFT; in radeon_bo_fault_reserve_notify()
806 r = ttm_bo_validate(bo, &rbo->placement, false, false); in radeon_bo_fault_reserve_notify()
809 return ttm_bo_validate(bo, &rbo->placement, false, false); in radeon_bo_fault_reserve_notify()
814 offset = bo->mem.start << PAGE_SHIFT; in radeon_bo_fault_reserve_notify()
822 int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait) in radeon_bo_wait() argument
826 r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL); in radeon_bo_wait()
830 *mem_type = bo->tbo.mem.mem_type; in radeon_bo_wait()
832 r = ttm_bo_wait(&bo->tbo, true, true, no_wait); in radeon_bo_wait()
833 ttm_bo_unreserve(&bo->tbo); in radeon_bo_wait()
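
The wait helper is nearly complete in the index; filled out, it reserves, optionally reports the current placement, then waits for outstanding fences:

int radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait)
{
	int r;

	r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);
	if (unlikely(r != 0))
		return r;
	if (mem_type)
		*mem_type = bo->tbo.mem.mem_type;

	r = ttm_bo_wait(&bo->tbo, true, true, no_wait);
	ttm_bo_unreserve(&bo->tbo);
	return r;
}
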
845 void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence, in radeon_bo_fence() argument
848 struct reservation_object *resv = bo->tbo.resv; in radeon_bo_fence()
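
The fence attach helper, reconstructed. The reservation_object_add_* calls are the standard kernel API of that era, though their use here is an assumption beyond the one line shown:

void radeon_bo_fence(struct radeon_bo *bo, struct radeon_fence *fence,
		     bool shared)
{
	struct reservation_object *resv = bo->tbo.resv;

	if (shared)
		reservation_object_add_shared_fence(resv, &fence->base);
	else
		reservation_object_add_excl_fence(resv, &fence->base);
}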