Lines matching refs:adev — a cross-reference listing of the lines in the amdgpu GEM code that use the adev device pointer; each entry shows the source line number, the matching line, and the enclosing function, with a note on whether adev is a function argument or a local there.
45 int amdgpu_gem_object_create(struct amdgpu_device *adev, unsigned long size, in amdgpu_gem_object_create() argument
64 max_size = adev->mc.gtt_size - adev->gart_pin_size; in amdgpu_gem_object_create()
72 r = amdgpu_bo_create(adev, size, alignment, kernel, initial_domain, in amdgpu_gem_object_create()
88 mutex_lock(&adev->gem.mutex); in amdgpu_gem_object_create()
89 list_add_tail(&robj->list, &adev->gem.objects); in amdgpu_gem_object_create()
90 mutex_unlock(&adev->gem.mutex); in amdgpu_gem_object_create()
95 int amdgpu_gem_init(struct amdgpu_device *adev) in amdgpu_gem_init() argument
97 INIT_LIST_HEAD(&adev->gem.objects); in amdgpu_gem_init()
101 void amdgpu_gem_fini(struct amdgpu_device *adev) in amdgpu_gem_fini() argument
103 amdgpu_bo_force_delete(adev); in amdgpu_gem_fini()
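
The entries above cover the lifecycle of the per-device GEM object list: amdgpu_gem_object_create() clamps GTT-domain allocations to adev->mc.gtt_size minus adev->gart_pin_size, creates the buffer object, and links it onto adev->gem.objects under adev->gem.mutex (lines 88-90); amdgpu_gem_init() starts that list out empty (line 97), and amdgpu_gem_fini() disposes of whatever is still on it via amdgpu_bo_force_delete() (line 103). Below is a minimal sketch of the same list/mutex idiom; the struct layouts and helper names are simplified stand-ins, not the driver's real definitions.

#include <linux/list.h>
#include <linux/mutex.h>

/* Assumed, simplified shape of the adev->gem bookkeeping seen above. */
struct amdgpu_gem_stub {
	struct mutex mutex;
	struct list_head objects;
};

/* Hypothetical stand-in for struct amdgpu_bo; only the list linkage matters here. */
struct amdgpu_bo_stub {
	struct list_head list;
};

/* Mirrors amdgpu_gem_init() at line 97: start with an empty object list. */
static void gem_stub_init(struct amdgpu_gem_stub *gem)
{
	mutex_init(&gem->mutex);
	INIT_LIST_HEAD(&gem->objects);
}

/* Mirrors the tail of amdgpu_gem_object_create() at lines 88-90: every
 * successfully created BO is appended under the mutex so debugfs and
 * teardown can walk the list safely. */
static void gem_stub_track(struct amdgpu_gem_stub *gem, struct amdgpu_bo_stub *robj)
{
	mutex_lock(&gem->mutex);
	list_add_tail(&robj->list, &gem->objects);
	mutex_unlock(&gem->mutex);
}
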
113 struct amdgpu_device *adev = rbo->adev; in amdgpu_gem_object_open() local
124 bo_va = amdgpu_vm_bo_add(adev, vm, rbo); in amdgpu_gem_object_open()
136 struct amdgpu_device *adev = rbo->adev; in amdgpu_gem_object_close() local
143 dev_err(adev->dev, "leaking bo va because " in amdgpu_gem_object_close()
150 amdgpu_vm_bo_rmv(adev, bo_va); in amdgpu_gem_object_close()
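
amdgpu_gem_object_open() gives each VM that opens the buffer its own mapping object (bo_va) via amdgpu_vm_bo_add() (line 124), and amdgpu_gem_object_close() tears that mapping down again with amdgpu_vm_bo_rmv() (line 150); if the close path cannot reserve the buffer it can only warn about the leaked bo_va (line 143). A hedged sketch of the pairing follows, with prototypes reconstructed from the argument lists shown above; the return types are assumptions, and the bo_va lookup and reservation handling are omitted.

struct amdgpu_device;
struct amdgpu_vm;
struct amdgpu_bo;
struct amdgpu_bo_va;

/* Prototypes assumed from the calls at lines 124 and 150. */
struct amdgpu_bo_va *amdgpu_vm_bo_add(struct amdgpu_device *adev,
				      struct amdgpu_vm *vm,
				      struct amdgpu_bo *bo);
void amdgpu_vm_bo_rmv(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va);

/* On open: attach a per-VM bo_va to the shared buffer object. */
static struct amdgpu_bo_va *gem_open_sketch(struct amdgpu_device *adev,
					    struct amdgpu_vm *vm,
					    struct amdgpu_bo *rbo)
{
	return amdgpu_vm_bo_add(adev, vm, rbo);
}

/* On close: drop the per-VM mapping again.  The lookup of the existing
 * bo_va and the error path behind the "leaking bo va" warning (line 143)
 * are left out of this sketch. */
static void gem_close_sketch(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va)
{
	amdgpu_vm_bo_rmv(adev, bo_va);
}
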
156 static int amdgpu_gem_handle_lockup(struct amdgpu_device *adev, int r) in amdgpu_gem_handle_lockup() argument
159 r = amdgpu_gpu_reset(adev); in amdgpu_gem_handle_lockup()
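
amdgpu_gem_handle_lockup() is the common error filter the GEM ioctls run their return codes through: when a failure indicates a hung GPU it triggers amdgpu_gpu_reset(adev) (line 159) and lets user space retry. A plausible reconstruction is below; the -EDEADLK check and the -EAGAIN retry code are assumptions, since only the reset call is visible in the listing.

#include <linux/errno.h>

struct amdgpu_device;
int amdgpu_gpu_reset(struct amdgpu_device *adev);	/* prototype assumed */

/* Sketch of the helper at line 156: translate a lockup-indicating error
 * into a GPU reset and ask the caller to retry. */
static int gem_handle_lockup_sketch(struct amdgpu_device *adev, int r)
{
	if (r == -EDEADLK) {		/* assumed lockup error code */
		r = amdgpu_gpu_reset(adev);
		if (!r)
			r = -EAGAIN;	/* assumed "retry the ioctl" result */
	}
	return r;
}

Lines 215, 294 and 391 show the create, userptr and wait-idle ioctls funnelling their results through this helper before returning to user space.
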
172 struct amdgpu_device *adev = dev->dev_private; in amdgpu_gem_create_ioctl() local
197 r = amdgpu_gem_object_create(adev, size, args->in.alignment, in amdgpu_gem_create_ioctl()
215 r = amdgpu_gem_handle_lockup(adev, r); in amdgpu_gem_create_ioctl()
222 struct amdgpu_device *adev = dev->dev_private; in amdgpu_gem_userptr_ioctl() local
248 r = amdgpu_gem_object_create(adev, args->size, 0, in amdgpu_gem_userptr_ioctl()
294 r = amdgpu_gem_handle_lockup(adev, r); in amdgpu_gem_userptr_ioctl()
361 struct amdgpu_device *adev = dev->dev_private; in amdgpu_gem_wait_idle_ioctl() local
391 r = amdgpu_gem_handle_lockup(adev, r); in amdgpu_gem_wait_idle_ioctl()
447 static void amdgpu_gem_va_update_vm(struct amdgpu_device *adev, in amdgpu_gem_va_update_vm() argument
464 vm_bos = amdgpu_vm_get_bos(adev, bo_va->vm, &list); in amdgpu_gem_va_update_vm()
488 r = amdgpu_vm_update_page_directory(adev, bo_va->vm); in amdgpu_gem_va_update_vm()
492 r = amdgpu_vm_clear_freed(adev, bo_va->vm); in amdgpu_gem_va_update_vm()
497 r = amdgpu_vm_bo_update(adev, bo_va, &bo_va->bo->tbo.mem); in amdgpu_gem_va_update_vm()
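
amdgpu_gem_va_update_vm() fixes the ordering used whenever a virtual-address mapping changes: gather the VM's buffer objects (line 464), update the page directory (line 488), clear mappings that have been freed (line 492), and finally write the page-table entries for the affected bo_va (line 497). The sketch below shows that ordering with prototypes reconstructed from the argument lists above; the negative-errno return types are assumptions, and the gathering/reservation step is omitted.

struct amdgpu_device;
struct amdgpu_vm;
struct amdgpu_bo_va;
struct ttm_mem_reg;

/* Prototypes assumed from the calls shown above. */
int amdgpu_vm_update_page_directory(struct amdgpu_device *adev, struct amdgpu_vm *vm);
int amdgpu_vm_clear_freed(struct amdgpu_device *adev, struct amdgpu_vm *vm);
int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va,
			struct ttm_mem_reg *mem);

/* Sketch of the update ordering in amdgpu_gem_va_update_vm(): page
 * directory first, then stale (freed) mappings, then the mappings of the
 * BO whose VA just changed. */
static int gem_va_update_sketch(struct amdgpu_device *adev,
				struct amdgpu_vm *vm,
				struct amdgpu_bo_va *bo_va,
				struct ttm_mem_reg *mem)
{
	int r;

	r = amdgpu_vm_update_page_directory(adev, vm);
	if (r)
		return r;

	r = amdgpu_vm_clear_freed(adev, vm);
	if (r)
		return r;

	return amdgpu_vm_bo_update(adev, bo_va, mem);
}
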
516 struct amdgpu_device *adev = dev->dev_private; in amdgpu_gem_va_ioctl() local
526 if (!adev->vm_manager.enabled) in amdgpu_gem_va_ioctl()
591 r = amdgpu_vm_bo_map(adev, bo_va, args->va_address, in amdgpu_gem_va_ioctl()
596 r = amdgpu_vm_bo_unmap(adev, bo_va, args->va_address); in amdgpu_gem_va_ioctl()
603 amdgpu_gem_va_update_vm(adev, bo_va, args->operation); in amdgpu_gem_va_ioctl()
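
amdgpu_gem_va_ioctl() first requires an enabled VM manager (line 526), then dispatches on args->operation: a map request ends in amdgpu_vm_bo_map() (line 591), an unmap in amdgpu_vm_bo_unmap() (line 596), and either way the page tables are brought up to date through amdgpu_gem_va_update_vm() (line 603). A minimal dispatch sketch follows; AMDGPU_VA_OP_MAP and AMDGPU_VA_OP_UNMAP are the uapi operation codes, while va_do_map()/va_do_unmap() are hypothetical wrappers, since the trailing arguments of the real map call are truncated in the listing.

#include <linux/errno.h>
#include <linux/types.h>

#define AMDGPU_VA_OP_MAP	1	/* operation codes as in the amdgpu uapi header */
#define AMDGPU_VA_OP_UNMAP	2

struct amdgpu_device;
struct amdgpu_bo_va;

/* Hypothetical wrappers standing in for amdgpu_vm_bo_map()/amdgpu_vm_bo_unmap(),
 * whose full argument lists (offset, size, flags, ...) are not visible above. */
int va_do_map(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, u64 va_address);
int va_do_unmap(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, u64 va_address);

static int gem_va_op_sketch(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va,
			    u32 operation, u64 va_address)
{
	switch (operation) {
	case AMDGPU_VA_OP_MAP:
		return va_do_map(adev, bo_va, va_address);
	case AMDGPU_VA_OP_UNMAP:
		return va_do_unmap(adev, bo_va, va_address);
	default:
		return -EINVAL;
	}
}
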
666 struct amdgpu_device *adev = dev->dev_private; in amdgpu_mode_dumb_create() local
671 args->pitch = amdgpu_align_pitch(adev, args->width, args->bpp, 0) * ((args->bpp + 1) / 8); in amdgpu_mode_dumb_create()
675 r = amdgpu_gem_object_create(adev, args->size, 0, in amdgpu_mode_dumb_create()
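
Line 671 is the dumb-buffer pitch computation: amdgpu_align_pitch() pads the width to a hardware-friendly boundary, and ((args->bpp + 1) / 8) converts bits per pixel to bytes per pixel (the +1 lets 15 bpp count as 2 bytes). A small worked example is below; the 256-pixel alignment used there is an assumption for illustration only, since the real alignment comes from amdgpu_align_pitch() and depends on the format.

#include <stdio.h>

/* Worked example of the pitch computation at line 671. */
static unsigned int pitch_for(unsigned int width, unsigned int bpp)
{
	unsigned int aligned_width = (width + 255) & ~255u;	/* assumed alignment */
	unsigned int cpp = (bpp + 1) / 8;	/* 32 bpp -> 4 bytes, 15 bpp -> 2 bytes */

	return aligned_width * cpp;
}

int main(void)
{
	/* e.g. 1366x768 at 32 bpp: width padded to 1536 pixels, 4 bytes each. */
	printf("pitch = %u bytes\n", pitch_for(1366, 32));	/* prints 6144 */
	return 0;
}
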
698 struct amdgpu_device *adev = dev->dev_private; in amdgpu_debugfs_gem_info() local
702 mutex_lock(&adev->gem.mutex); in amdgpu_debugfs_gem_info()
703 list_for_each_entry(rbo, &adev->gem.objects, list) { in amdgpu_debugfs_gem_info()
725 mutex_unlock(&adev->gem.mutex); in amdgpu_debugfs_gem_info()
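
amdgpu_debugfs_gem_info() walks the same adev->gem.objects list that object creation populates, holding adev->gem.mutex across the walk (lines 702-725) so entries cannot appear or vanish mid-iteration. Below is a sketch of that walk, reusing the simplified stub structures from the lifecycle sketch above; the fields the real debugfs file prints are not visible in the listing, so the output line here is illustrative only.

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/seq_file.h>

struct amdgpu_gem_stub {		/* as in the earlier sketch */
	struct mutex mutex;
	struct list_head objects;
};

struct amdgpu_bo_stub {
	struct list_head list;
};

/* Sketch of the loop at lines 702-725: holding the mutex keeps the
 * debugfs dump consistent with concurrent create/free paths. */
static void gem_info_sketch(struct seq_file *m, struct amdgpu_gem_stub *gem)
{
	struct amdgpu_bo_stub *rbo;
	unsigned int i = 0;

	mutex_lock(&gem->mutex);
	list_for_each_entry(rbo, &gem->objects, list)
		seq_printf(m, "object %u at %p\n", i++, rbo);
	mutex_unlock(&gem->mutex);
}
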
734 int amdgpu_gem_debugfs_init(struct amdgpu_device *adev) in amdgpu_gem_debugfs_init() argument
737 return amdgpu_debugfs_add_files(adev, amdgpu_debugfs_gem_list, 1); in amdgpu_gem_debugfs_init()