Lines matching refs:bo (drivers/gpu/drm/amd/amdgpu/amdgpu_object.c, early Linux 4.2-era amdgpu driver)

94 struct amdgpu_bo *bo; in amdgpu_ttm_bo_destroy() local
96 bo = container_of(tbo, struct amdgpu_bo, tbo); in amdgpu_ttm_bo_destroy()
98 amdgpu_update_memory_usage(bo->adev, &bo->tbo.mem, NULL); in amdgpu_ttm_bo_destroy()
100 mutex_lock(&bo->adev->gem.mutex); in amdgpu_ttm_bo_destroy()
101 list_del_init(&bo->list); in amdgpu_ttm_bo_destroy()
102 mutex_unlock(&bo->adev->gem.mutex); in amdgpu_ttm_bo_destroy()
103 drm_gem_object_release(&bo->gem_base); in amdgpu_ttm_bo_destroy()
104 amdgpu_bo_unref(&bo->parent); in amdgpu_ttm_bo_destroy()
105 kfree(bo->metadata); in amdgpu_ttm_bo_destroy()
106 kfree(bo); in amdgpu_ttm_bo_destroy()
109 bool amdgpu_ttm_bo_is_amdgpu_bo(struct ttm_buffer_object *bo) in amdgpu_ttm_bo_is_amdgpu_bo() argument
111 if (bo->destroy == &amdgpu_ttm_bo_destroy) in amdgpu_ttm_bo_is_amdgpu_bo()
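The destroy callback doubles as a type tag here: TTM itself only hands back a struct ttm_buffer_object, so amdgpu_ttm_bo_is_amdgpu_bo() checks that the destroy pointer is amdgpu_ttm_bo_destroy before any container_of() downcast can be trusted. A minimal sketch of that pattern (my_amdgpu_bo() is a hypothetical helper, not a kernel API):

	#include "amdgpu.h"

	/* Recover the driver BO from a TTM BO, but only when the destroy
	 * callback proves the object really came from this driver. */
	static struct amdgpu_bo *my_amdgpu_bo(struct ttm_buffer_object *tbo)
	{
		if (!amdgpu_ttm_bo_is_amdgpu_bo(tbo))
			return NULL;	/* foreign TTM object */
		return container_of(tbo, struct amdgpu_bo, tbo);
	}
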
204 static void amdgpu_fill_placement_to_bo(struct amdgpu_bo *bo, in amdgpu_fill_placement_to_bo() argument
209 memcpy(bo->placements, placement->placement, in amdgpu_fill_placement_to_bo()
211 bo->placement.num_placement = placement->num_placement; in amdgpu_fill_placement_to_bo()
212 bo->placement.num_busy_placement = placement->num_busy_placement; in amdgpu_fill_placement_to_bo()
213 bo->placement.placement = bo->placements; in amdgpu_fill_placement_to_bo()
214 bo->placement.busy_placement = bo->placements; in amdgpu_fill_placement_to_bo()
225 struct amdgpu_bo *bo; in amdgpu_bo_create_restricted() local
246 bo = kzalloc(sizeof(struct amdgpu_bo), GFP_KERNEL); in amdgpu_bo_create_restricted()
247 if (bo == NULL) in amdgpu_bo_create_restricted()
249 r = drm_gem_object_init(adev->ddev, &bo->gem_base, size); in amdgpu_bo_create_restricted()
251 kfree(bo); in amdgpu_bo_create_restricted()
254 bo->adev = adev; in amdgpu_bo_create_restricted()
255 INIT_LIST_HEAD(&bo->list); in amdgpu_bo_create_restricted()
256 INIT_LIST_HEAD(&bo->va); in amdgpu_bo_create_restricted()
257 bo->initial_domain = domain & (AMDGPU_GEM_DOMAIN_VRAM | in amdgpu_bo_create_restricted()
264 bo->flags = flags; in amdgpu_bo_create_restricted()
270 bo->flags &= ~AMDGPU_GEM_CREATE_CPU_GTT_USWC; in amdgpu_bo_create_restricted()
272 amdgpu_fill_placement_to_bo(bo, placement); in amdgpu_bo_create_restricted()
274 r = ttm_bo_init(&adev->mman.bdev, &bo->tbo, size, type, in amdgpu_bo_create_restricted()
275 &bo->placement, page_align, !kernel, NULL, in amdgpu_bo_create_restricted()
280 *bo_ptr = bo; in amdgpu_bo_create_restricted()
282 trace_amdgpu_bo_create(bo); in amdgpu_bo_create_restricted()
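A hedged caller sketch for the creation path above, assuming the Linux 4.2-era signature of amdgpu_bo_create_restricted() (later kernels add a reservation-object parameter, so check your tree); the 64 KiB size and 256 MiB VRAM window are arbitrary example values:

	#include "amdgpu.h"

	/* Create a small kernel BO whose placement is restricted to the
	 * first 256 MiB of VRAM via an explicit ttm_place/ttm_placement. */
	static int example_create_low_vram_bo(struct amdgpu_device *adev,
					      struct amdgpu_bo **bo_ptr)
	{
		struct ttm_place place = {
			.fpfn = 0,
			.lpfn = (256ULL * 1024 * 1024) >> PAGE_SHIFT,
			.flags = TTM_PL_FLAG_WC | TTM_PL_FLAG_UNCACHED |
				 TTM_PL_FLAG_VRAM,
		};
		struct ttm_placement placement = {
			.num_placement = 1,
			.placement = &place,
			.num_busy_placement = 1,
			.busy_placement = &place,
		};

		return amdgpu_bo_create_restricted(adev, 64 * 1024, PAGE_SIZE,
						   true, AMDGPU_GEM_DOMAIN_VRAM,
						   0, NULL, &placement, bo_ptr);
	}
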
308 int amdgpu_bo_kmap(struct amdgpu_bo *bo, void **ptr) in amdgpu_bo_kmap() argument
313 if (bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS) in amdgpu_bo_kmap()
316 if (bo->kptr) { in amdgpu_bo_kmap()
318 *ptr = bo->kptr; in amdgpu_bo_kmap()
322 r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap); in amdgpu_bo_kmap()
326 bo->kptr = ttm_kmap_obj_virtual(&bo->kmap, &is_iomem); in amdgpu_bo_kmap()
328 *ptr = bo->kptr; in amdgpu_bo_kmap()
333 void amdgpu_bo_kunmap(struct amdgpu_bo *bo) in amdgpu_bo_kunmap() argument
335 if (bo->kptr == NULL) in amdgpu_bo_kunmap()
337 bo->kptr = NULL; in amdgpu_bo_kunmap()
338 ttm_bo_kunmap(&bo->kmap); in amdgpu_bo_kunmap()
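amdgpu_bo_kmap() caches the kernel virtual address in bo->kptr (so repeated calls are cheap) and refuses BOs created with AMDGPU_GEM_CREATE_NO_CPU_ACCESS; amdgpu_bo_kunmap() drops the cached pointer and unmaps. A minimal usage sketch, assuming a pinned, CPU-visible kernel BO and calling with the BO reservation held:

	#include "amdgpu.h"

	/* CPU-clear a kernel BO through a temporary kernel mapping. */
	static int example_clear_bo(struct amdgpu_bo *bo)
	{
		void *cpu_addr;
		int r;

		r = amdgpu_bo_reserve(bo, false);
		if (r)
			return r;

		r = amdgpu_bo_kmap(bo, &cpu_addr);
		if (!r) {
			memset(cpu_addr, 0, amdgpu_bo_size(bo));
			amdgpu_bo_kunmap(bo);
		}
		amdgpu_bo_unreserve(bo);
		return r;
	}
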
341 struct amdgpu_bo *amdgpu_bo_ref(struct amdgpu_bo *bo) in amdgpu_bo_ref() argument
343 if (bo == NULL) in amdgpu_bo_ref()
346 ttm_bo_reference(&bo->tbo); in amdgpu_bo_ref()
347 return bo; in amdgpu_bo_ref()
350 void amdgpu_bo_unref(struct amdgpu_bo **bo) in amdgpu_bo_unref() argument
354 if ((*bo) == NULL) in amdgpu_bo_unref()
357 tbo = &((*bo)->tbo); in amdgpu_bo_unref()
360 *bo = NULL; in amdgpu_bo_unref()
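Reference counting rides on the embedded TTM object: amdgpu_bo_ref() takes a TTM reference and returns the BO (NULL-safe), while amdgpu_bo_unref() drops one and clears the caller's pointer so a stale handle cannot be reused. Sketch:

	#include "amdgpu.h"

	/* Hold an extra reference across a window where the original
	 * owner may drop theirs. */
	static void example_hold_bo(struct amdgpu_bo *bo)
	{
		struct amdgpu_bo *ref = amdgpu_bo_ref(bo);	/* +1 */

		/* ... work that must outlive the caller's reference ... */

		amdgpu_bo_unref(&ref);	/* -1; also sets ref = NULL */
	}
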
363 int amdgpu_bo_pin_restricted(struct amdgpu_bo *bo, u32 domain, in amdgpu_bo_pin_restricted() argument
370 if (amdgpu_ttm_tt_has_userptr(bo->tbo.ttm)) in amdgpu_bo_pin_restricted()
376 if (bo->pin_count) { in amdgpu_bo_pin_restricted()
377 bo->pin_count++; in amdgpu_bo_pin_restricted()
379 *gpu_addr = amdgpu_bo_gpu_offset(bo); in amdgpu_bo_pin_restricted()
384 domain_start = bo->adev->mc.vram_start; in amdgpu_bo_pin_restricted()
386 domain_start = bo->adev->mc.gtt_start; in amdgpu_bo_pin_restricted()
388 (amdgpu_bo_gpu_offset(bo) - domain_start)); in amdgpu_bo_pin_restricted()
393 amdgpu_ttm_placement_from_domain(bo, domain); in amdgpu_bo_pin_restricted()
394 for (i = 0; i < bo->placement.num_placement; i++) { in amdgpu_bo_pin_restricted()
396 if ((bo->placements[i].flags & TTM_PL_FLAG_VRAM) && in amdgpu_bo_pin_restricted()
397 !(bo->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS) && in amdgpu_bo_pin_restricted()
398 (!max_offset || max_offset > bo->adev->mc.visible_vram_size)) { in amdgpu_bo_pin_restricted()
400 bo->adev->mc.visible_vram_size)) in amdgpu_bo_pin_restricted()
403 lpfn = bo->adev->mc.visible_vram_size >> PAGE_SHIFT; in amdgpu_bo_pin_restricted()
408 if (fpfn > bo->placements[i].fpfn) in amdgpu_bo_pin_restricted()
409 bo->placements[i].fpfn = fpfn; in amdgpu_bo_pin_restricted()
410 if (!bo->placements[i].lpfn || in amdgpu_bo_pin_restricted()
411 (lpfn && lpfn < bo->placements[i].lpfn)) in amdgpu_bo_pin_restricted()
412 bo->placements[i].lpfn = lpfn; in amdgpu_bo_pin_restricted()
413 bo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT; in amdgpu_bo_pin_restricted()
416 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in amdgpu_bo_pin_restricted()
418 bo->pin_count = 1; in amdgpu_bo_pin_restricted()
420 *gpu_addr = amdgpu_bo_gpu_offset(bo); in amdgpu_bo_pin_restricted()
422 bo->adev->vram_pin_size += amdgpu_bo_size(bo); in amdgpu_bo_pin_restricted()
424 bo->adev->gart_pin_size += amdgpu_bo_size(bo); in amdgpu_bo_pin_restricted()
426 dev_err(bo->adev->dev, "%p pin failed\n", bo); in amdgpu_bo_pin_restricted()
431 int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain, u64 *gpu_addr) in amdgpu_bo_pin() argument
433 return amdgpu_bo_pin_restricted(bo, domain, 0, 0, gpu_addr); in amdgpu_bo_pin()
436 int amdgpu_bo_unpin(struct amdgpu_bo *bo) in amdgpu_bo_unpin() argument
440 if (!bo->pin_count) { in amdgpu_bo_unpin()
441 dev_warn(bo->adev->dev, "%p unpin not necessary\n", bo); in amdgpu_bo_unpin()
444 bo->pin_count--; in amdgpu_bo_unpin()
445 if (bo->pin_count) in amdgpu_bo_unpin()
447 for (i = 0; i < bo->placement.num_placement; i++) { in amdgpu_bo_unpin()
448 bo->placements[i].lpfn = 0; in amdgpu_bo_unpin()
449 bo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT; in amdgpu_bo_unpin()
451 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in amdgpu_bo_unpin()
453 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) in amdgpu_bo_unpin()
454 bo->adev->vram_pin_size -= amdgpu_bo_size(bo); in amdgpu_bo_unpin()
456 bo->adev->gart_pin_size -= amdgpu_bo_size(bo); in amdgpu_bo_unpin()
458 dev_err(bo->adev->dev, "%p validate failed for unpin\n", bo); in amdgpu_bo_unpin()
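Pinning is counted: a second pin only bumps pin_count and optionally reports the existing GPU address, while the real work (clamping lpfn into the CPU-visible VRAM window for CPU-accessible BOs, setting TTM_PL_FLAG_NO_EVICT, revalidating) happens on the first pin; amdgpu_bo_unpin() likewise only revalidates once the count drops to zero. A typical pin/unpin pair, sketched with the reservation held around each call:

	#include "amdgpu.h"

	/* Pin a BO into VRAM (e.g. for scanout) and report its GPU address. */
	static int example_pin_vram(struct amdgpu_bo *bo, u64 *gpu_addr)
	{
		int r;

		r = amdgpu_bo_reserve(bo, false);
		if (r)
			return r;
		r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_VRAM, gpu_addr);
		amdgpu_bo_unreserve(bo);
		return r;
	}

	/* Drop the pin again once the hardware no longer needs the BO. */
	static void example_unpin(struct amdgpu_bo *bo)
	{
		if (amdgpu_bo_reserve(bo, false) == 0) {
			amdgpu_bo_unpin(bo);
			amdgpu_bo_unreserve(bo);
		}
	}
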
475 struct amdgpu_bo *bo, *n; in amdgpu_bo_force_delete() local
481 list_for_each_entry_safe(bo, n, &adev->gem.objects, list) { in amdgpu_bo_force_delete()
483 &bo->gem_base, bo, (unsigned long)bo->gem_base.size, in amdgpu_bo_force_delete()
484 *((unsigned long *)&bo->gem_base.refcount)); in amdgpu_bo_force_delete()
485 mutex_lock(&bo->adev->gem.mutex); in amdgpu_bo_force_delete()
486 list_del_init(&bo->list); in amdgpu_bo_force_delete()
487 mutex_unlock(&bo->adev->gem.mutex); in amdgpu_bo_force_delete()
489 drm_gem_object_unreference_unlocked(&bo->gem_base); in amdgpu_bo_force_delete()
512 int amdgpu_bo_fbdev_mmap(struct amdgpu_bo *bo, in amdgpu_bo_fbdev_mmap() argument
515 return ttm_fbdev_mmap(vma, &bo->tbo); in amdgpu_bo_fbdev_mmap()
518 int amdgpu_bo_set_tiling_flags(struct amdgpu_bo *bo, u64 tiling_flags) in amdgpu_bo_set_tiling_flags() argument
523 bo->tiling_flags = tiling_flags; in amdgpu_bo_set_tiling_flags()
527 void amdgpu_bo_get_tiling_flags(struct amdgpu_bo *bo, u64 *tiling_flags) in amdgpu_bo_get_tiling_flags() argument
529 lockdep_assert_held(&bo->tbo.resv->lock.base); in amdgpu_bo_get_tiling_flags()
532 *tiling_flags = bo->tiling_flags; in amdgpu_bo_get_tiling_flags()
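Both tiling helpers expect the BO reservation to be held; the getter even asserts it through lockdep (line 529 above). A sketch of a set/get round trip under the reservation:

	#include "amdgpu.h"

	/* Update tiling flags with the reservation held, as the lockdep
	 * assertion in amdgpu_bo_get_tiling_flags() demands. */
	static int example_set_tiling(struct amdgpu_bo *bo, u64 flags)
	{
		u64 old;
		int r;

		r = amdgpu_bo_reserve(bo, false);
		if (r)
			return r;

		amdgpu_bo_get_tiling_flags(bo, &old);
		r = amdgpu_bo_set_tiling_flags(bo, flags);
		amdgpu_bo_unreserve(bo);
		return r;
	}
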
535 int amdgpu_bo_set_metadata (struct amdgpu_bo *bo, void *metadata, in amdgpu_bo_set_metadata() argument
541 if (bo->metadata_size) { in amdgpu_bo_set_metadata()
542 kfree(bo->metadata); in amdgpu_bo_set_metadata()
543 bo->metadata = NULL; in amdgpu_bo_set_metadata()
544 bo->metadata_size = 0; in amdgpu_bo_set_metadata()
556 kfree(bo->metadata); in amdgpu_bo_set_metadata()
557 bo->metadata_flags = flags; in amdgpu_bo_set_metadata()
558 bo->metadata = buffer; in amdgpu_bo_set_metadata()
559 bo->metadata_size = metadata_size; in amdgpu_bo_set_metadata()
564 int amdgpu_bo_get_metadata(struct amdgpu_bo *bo, void *buffer, in amdgpu_bo_get_metadata() argument
572 if (buffer_size < bo->metadata_size) in amdgpu_bo_get_metadata()
575 if (bo->metadata_size) in amdgpu_bo_get_metadata()
576 memcpy(buffer, bo->metadata, bo->metadata_size); in amdgpu_bo_get_metadata()
580 *metadata_size = bo->metadata_size; in amdgpu_bo_get_metadata()
582 *flags = bo->metadata_flags; in amdgpu_bo_get_metadata()
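amdgpu_bo_set_metadata() replaces any existing blob with a fresh kmalloc copy (a zero metadata_size just frees the old one), and amdgpu_bo_get_metadata() copies it back only when the destination buffer is large enough; the out-parameters are NULL-checked and optional. A round-trip sketch, assuming the 4.2-era signatures:

	#include "amdgpu.h"

	/* Attach an opaque metadata blob to a BO and read it back. */
	static int example_metadata_roundtrip(struct amdgpu_bo *bo)
	{
		u8 blob[16] = { 0xde, 0xad };
		u8 readback[16];
		u32 size;
		u64 flags;
		int r;

		r = amdgpu_bo_set_metadata(bo, blob, sizeof(blob), 0);
		if (r)
			return r;

		return amdgpu_bo_get_metadata(bo, readback, sizeof(readback),
					      &size, &flags);
	}
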
587 void amdgpu_bo_move_notify(struct ttm_buffer_object *bo, in amdgpu_bo_move_notify() argument
592 if (!amdgpu_ttm_bo_is_amdgpu_bo(bo)) in amdgpu_bo_move_notify()
595 rbo = container_of(bo, struct amdgpu_bo, tbo); in amdgpu_bo_move_notify()
603 amdgpu_update_memory_usage(rbo->adev, &bo->mem, new_mem); in amdgpu_bo_move_notify()
606 int amdgpu_bo_fault_reserve_notify(struct ttm_buffer_object *bo) in amdgpu_bo_fault_reserve_notify() argument
613 if (!amdgpu_ttm_bo_is_amdgpu_bo(bo)) in amdgpu_bo_fault_reserve_notify()
616 abo = container_of(bo, struct amdgpu_bo, tbo); in amdgpu_bo_fault_reserve_notify()
618 if (bo->mem.mem_type != TTM_PL_VRAM) in amdgpu_bo_fault_reserve_notify()
621 size = bo->mem.num_pages << PAGE_SHIFT; in amdgpu_bo_fault_reserve_notify()
622 offset = bo->mem.start << PAGE_SHIFT; in amdgpu_bo_fault_reserve_notify()
635 r = ttm_bo_validate(bo, &abo->placement, false, false); in amdgpu_bo_fault_reserve_notify()
638 return ttm_bo_validate(bo, &abo->placement, false, false); in amdgpu_bo_fault_reserve_notify()
643 offset = bo->mem.start << PAGE_SHIFT; in amdgpu_bo_fault_reserve_notify()
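Neither of these is called directly: they are TTM driver callbacks. move_notify keeps the per-domain memory accounting current whenever TTM migrates a BO, and fault_reserve_notify revalidates a VRAM BO into the CPU-visible aperture when a CPU page fault lands outside it (falling back to GTT if that fails, hence the two ttm_bo_validate() calls above). They are wired up through struct ttm_bo_driver, roughly as in the driver's TTM initialization:

	#include <drm/ttm/ttm_bo_driver.h>
	#include "amdgpu.h"

	/* Sketch: how the notify hooks plug into TTM (cf. amdgpu_ttm.c);
	 * the other mandatory callbacks are elided here. */
	static struct ttm_bo_driver amdgpu_bo_driver = {
		/* ... */
		.move_notify = &amdgpu_bo_move_notify,
		.fault_reserve_notify = &amdgpu_bo_fault_reserve_notify,
	};
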
659 void amdgpu_bo_fence(struct amdgpu_bo *bo, struct fence *fence, in amdgpu_bo_fence() argument
662 struct reservation_object *resv = bo->tbo.resv; in amdgpu_bo_fence()
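amdgpu_bo_fence() attaches a fence to the BO's reservation object: shared == false installs it as the exclusive fence (a GPU write that everyone must wait for), shared == true adds it to the shared list (a read). Sketch, with the fence assumed to come from a previously submitted job (struct fence is the pre-4.10 name of dma_fence):

	#include <linux/fence.h>
	#include "amdgpu.h"

	/* Publish a GPU write: later movers/readers of the BO must wait. */
	static void example_publish_write(struct amdgpu_bo *bo,
					  struct fence *fence)
	{
		amdgpu_bo_fence(bo, fence, false);	/* exclusive slot */
	}
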