/linux-4.1.27/drivers/gpu/drm/qxl/ |
D | qxl_object.c |
    30   static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)   in qxl_ttm_bo_destroy() argument
    35   bo = container_of(tbo, struct qxl_bo, tbo);   in qxl_ttm_bo_destroy()
    111  r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type,   in qxl_bo_create()
    135  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);   in qxl_bo_kmap()
    147  struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];   in qxl_bo_kmap_atomic_page()
    152  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)   in qxl_bo_kmap_atomic_page()
    154  else if (bo->tbo.mem.mem_type == TTM_PL_PRIV0)   in qxl_bo_kmap_atomic_page()
    160  ret = ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem);   in qxl_bo_kmap_atomic_page()
    163  return io_mapping_map_atomic_wc(map, bo->tbo.mem.bus.offset + page_offset);   in qxl_bo_kmap_atomic_page()
    189  struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];   in qxl_bo_kunmap_atomic_page()
    [all …]
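The hits above show the standard TTM embedding pattern: struct qxl_bo wraps a struct ttm_buffer_object, and the destroy callback recovers the wrapper with container_of(). A minimal, hedged sketch of that pattern (my_bo and my_ttm_bo_destroy are illustrative names, not symbols from the qxl driver):

    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <drm/ttm/ttm_bo_api.h>

    /* Illustrative wrapper: the driver object embeds the TTM buffer object. */
    struct my_bo {
            struct ttm_buffer_object tbo;   /* what TTM sees and passes around */
            struct ttm_bo_kmap_obj kmap;    /* optional CPU mapping, as in qxl_bo */
    };

    /* TTM destroy callback: TTM hands back only the embedded member. */
    static void my_ttm_bo_destroy(struct ttm_buffer_object *tbo)
    {
            struct my_bo *bo = container_of(tbo, struct my_bo, tbo);

            kfree(bo);      /* real drivers also drop GEM/driver bookkeeping here */
    }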
|
D | qxl_object.h |
    34   r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);   in qxl_bo_reserve()
    47   ttm_bo_unreserve(&bo->tbo);   in qxl_bo_unreserve()
    52   return bo->tbo.offset;   in qxl_bo_gpu_offset()
    57   return bo->tbo.num_pages << PAGE_SHIFT;   in qxl_bo_size()
    62   return drm_vma_node_offset_addr(&bo->tbo.vma_node);   in qxl_bo_mmap_offset()
    70   r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);   in qxl_bo_wait()
    80   *mem_type = bo->tbo.mem.mem_type;   in qxl_bo_wait()
    82   r = ttm_bo_wait(&bo->tbo, true, true, no_wait);   in qxl_bo_wait()
    83   ttm_bo_unreserve(&bo->tbo);   in qxl_bo_wait()
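qxl_object.h wraps the embedded tbo in small inline helpers (reserve, unreserve, size, mmap offset). A sketch of the same wrappers against the 4.1-era TTM API, reusing the illustrative struct my_bo from the sketch above:

    #include <drm/drm_vma_manager.h>

    static inline int my_bo_reserve(struct my_bo *bo, bool no_wait)
    {
            /* interruptible = true, no_wait as requested, no ww ticket */
            return ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);
    }

    static inline void my_bo_unreserve(struct my_bo *bo)
    {
            ttm_bo_unreserve(&bo->tbo);
    }

    static inline unsigned long my_bo_size(struct my_bo *bo)
    {
            /* object size in bytes, derived from the TTM page count */
            return bo->tbo.num_pages << PAGE_SHIFT;
    }

    static inline u64 my_bo_mmap_offset(struct my_bo *bo)
    {
            /* fake offset userspace passes to mmap() on the DRM fd */
            return drm_vma_node_offset_addr(&bo->tbo.vma_node);
    }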
|
D | qxl_release.c |
    219  if (entry->tv.bo == &bo->tbo)   in qxl_release_list_add()
    228  entry->tv.bo = &bo->tbo;   in qxl_release_list_add()
    240  ret = ttm_bo_validate(&bo->tbo, &bo->placement,   in qxl_release_validate_bo()
    246  ret = reservation_object_reserve_shared(bo->tbo.resv);   in qxl_release_validate_bo()
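qxl_release_validate_bo() (lines 240 and 246 above) first validates the BO into an allowed placement and then reserves room for a shared fence on its reservation object. A hedged sketch of that sequence, with error handling trimmed and the placement passed in explicitly:

    #include <linux/reservation.h>

    static int my_validate_for_release(struct my_bo *bo,
                                       struct ttm_placement *placement)
    {
            int ret;

            /* interruptible = true, no_wait_gpu = false in the 4.1 signature */
            ret = ttm_bo_validate(&bo->tbo, placement, true, false);
            if (ret)
                    return ret;

            /* make sure a slot exists for the shared fence added later */
            return reservation_object_reserve_shared(bo->tbo.resv);
    }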
|
D | qxl_drv.h |
    107  struct ttm_buffer_object tbo;   member
    122  #define to_qxl_bo(tobj) container_of((tobj), struct qxl_bo, tbo)
    378  return slot->high_bits | (bo->tbo.offset + offset);   in qxl_bo_physical_address()
|
D | qxl_debugfs.c |
    65   fobj = rcu_dereference(bo->tbo.resv->fence);   in qxl_debugfs_buffers_info()
|
D | qxl_ttm.c |
    204  qbo = container_of(bo, struct qxl_bo, tbo);   in qxl_evict_flags()
    368  qbo = container_of(bo, struct qxl_bo, tbo);   in qxl_bo_move_notify()
|
D | qxl_cmd.c |
    520  …cmd->u.surface_create.data |= (new_mem->start << PAGE_SHIFT) + surf->tbo.bdev->man[new_mem->mem_ty…   in qxl_hw_surface_alloc()
    627  ret = ttm_bo_wait(&surf->tbo, true, true, !stall);   in qxl_reap_surf()
|
D | qxl_ioctl.c |
    343  ret = ttm_bo_validate(&qobj->tbo, &qobj->placement,   in qxl_update_area_ioctl()
|
/linux-4.1.27/drivers/gpu/drm/radeon/ |
D | radeon_object.c |
    54   u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;   in radeon_update_memory_usage()
    72   static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)   in radeon_ttm_bo_destroy() argument
    76   bo = container_of(tbo, struct radeon_bo, tbo);   in radeon_ttm_bo_destroy()
    78   radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);   in radeon_ttm_bo_destroy()
    253  r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,   in radeon_bo_create()
    278  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);   in radeon_bo_kmap()
    304  ttm_bo_reference(&bo->tbo);   in radeon_bo_ref()
    310  struct ttm_buffer_object *tbo;   in radeon_bo_unref() local
    316  tbo = &((*bo)->tbo);   in radeon_bo_unref()
    317  ttm_bo_unref(&tbo);   in radeon_bo_unref()
    [all …]
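radeon_bo_unref() (lines 310-317 above) shows the unref idiom repeated almost verbatim by the bochs, cirrus, mgag200 and ast drivers further down: take the address of the embedded tbo, let ttm_bo_unref() drop the reference (it clears its argument, and the destroy callback may free the wrapper), then clear the caller's pointer. A sketch using the illustrative my_bo wrapper from earlier:

    static void my_bo_unref(struct my_bo **bo)
    {
            struct ttm_buffer_object *tbo;

            if ((*bo) == NULL)
                    return;

            tbo = &((*bo)->tbo);
            ttm_bo_unref(&tbo);     /* sets tbo to NULL; may free *bo via destroy */
            if (tbo == NULL)
                    *bo = NULL;     /* never touch *bo again after the unref */
    }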
|
D | radeon_prime.c |
    35   int npages = bo->tbo.num_pages;   in radeon_gem_prime_get_sg_table()
    37   return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);   in radeon_gem_prime_get_sg_table()
    45   ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,   in radeon_gem_prime_vmap()
    116  return bo->tbo.resv;   in radeon_gem_prime_res_obj()
    124  if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))   in radeon_gem_prime_export()
|
D | radeon_object.h |
    68   r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, NULL);   in radeon_bo_reserve()
    79   ttm_bo_unreserve(&bo->tbo);   in radeon_bo_unreserve()
    93   return bo->tbo.offset;   in radeon_bo_gpu_offset()
    98   return bo->tbo.num_pages << PAGE_SHIFT;   in radeon_bo_size()
    103  return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;   in radeon_bo_ngpu_pages()
    108  return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;   in radeon_bo_gpu_page_alignment()
    119  return drm_vma_node_offset_addr(&bo->tbo.vma_node);   in radeon_bo_mmap_offset()
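radeon_object.h converts the TTM page count into bytes and GPU pages (lines 98-108 above). A hedged sketch of that arithmetic; MY_GPU_PAGE_SIZE is a stand-in for RADEON_GPU_PAGE_SIZE, which is 4096 bytes in this tree, and the helper names are made up:

    #define MY_GPU_PAGE_SIZE 4096   /* stand-in for RADEON_GPU_PAGE_SIZE */

    static inline unsigned my_bo_ngpu_pages(struct my_bo *bo)
    {
            /* bytes = CPU pages << PAGE_SHIFT, then split into GPU-sized pages */
            return (bo->tbo.num_pages << PAGE_SHIFT) / MY_GPU_PAGE_SIZE;
    }

    static inline unsigned my_bo_gpu_page_alignment(struct my_bo *bo)
    {
            /* same conversion applied to the buffer's placement alignment */
            return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / MY_GPU_PAGE_SIZE;
    }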
|
D | radeon_gem.c |
    38   drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);   in radeon_gem_object_free()
    114  r = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, 30 * HZ);   in radeon_gem_set_domain()
    323  r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);   in radeon_gem_userptr_ioctl()
    342  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);   in radeon_gem_userptr_ioctl()
    412  if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) {   in radeon_mode_dumb_mmap()
    468  ret = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, 30 * HZ);   in radeon_gem_wait_idle_ioctl()
    475  cur_placement = ACCESS_ONCE(robj->tbo.mem.mem_type);   in radeon_gem_wait_idle_ioctl()
    546  tv.bo = &bo_va->bo->tbo;   in radeon_gem_va_update_vm()
    572  r = radeon_vm_bo_update(rdev, bo_va, &bo_va->bo->tbo.mem);   in radeon_gem_va_update_vm()
    707  if (radeon_ttm_tt_has_userptr(robj->tbo.ttm))   in radeon_gem_op_ioctl()
    [all …]
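Several radeon_gem.c paths (lines 114 and 468 above) wait for all fences on the BO's reservation object before touching it, with a 30 second cap. A hedged sketch of that wait against the 4.1-era reservation API, again using the illustrative my_bo wrapper:

    #include <linux/reservation.h>
    #include <linux/jiffies.h>
    #include <linux/errno.h>

    static int my_bo_wait_idle(struct my_bo *bo)
    {
            long r;

            /* wait_all = true (shared and exclusive fences), interruptible = true */
            r = reservation_object_wait_timeout_rcu(bo->tbo.resv, true, true, 30 * HZ);
            if (r == 0)
                    return -EBUSY;          /* timed out */
            return r < 0 ? r : 0;           /* negative errno, or 0 on success */
    }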
|
D | radeon_mn.c |
    145  if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound)   in radeon_mn_invalidate_range_start()
    154  r = reservation_object_wait_timeout_rcu(bo->tbo.resv,   in radeon_mn_invalidate_range_start()
    160  r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);   in radeon_mn_invalidate_range_start()
|
D | radeon_cs.c |
    154  if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) {   in radeon_cs_parser_relocs()
    167  p->relocs[i].tv.bo = &p->relocs[i].robj->tbo;   in radeon_cs_parser_relocs()
    241  resv = reloc->robj->tbo.resv;   in radeon_cs_sync_rings()
    385  return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;   in cmp_size_smaller_first()
    499  &rdev->ring_tmp_bo.bo->tbo.mem);   in radeon_bo_vm_update_pte()
    513  r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem);   in radeon_bo_vm_update_pte()
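cmp_size_smaller_first() (line 385 above) is a list_sort() comparator that orders the command-stream validation list by ascending BO size, judged purely by tbo.num_pages. A sketch of that comparator against an illustrative list entry type (my_bo_list is a stand-in, not the driver's struct):

    #include <linux/list.h>
    #include <linux/list_sort.h>

    struct my_bo_list {                     /* illustrative stand-in for radeon_bo_list */
            struct list_head list;
            struct my_bo *robj;
    };

    static int my_cmp_size_smaller_first(void *priv, struct list_head *a,
                                         struct list_head *b)
    {
            struct my_bo_list *la = list_entry(a, struct my_bo_list, list);
            struct my_bo_list *lb = list_entry(b, struct my_bo_list, list);

            /* negative when a is the smaller BO, so small buffers sort first */
            return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;
    }

    /* used roughly as: list_sort(NULL, &validated_list, my_cmp_size_smaller_first); */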
|
D | radeon_benchmark.c |
    125  dobj->tbo.resv);   in radeon_benchmark_move()
    136  dobj->tbo.resv);   in radeon_benchmark_move()
|
D | radeon_test.c |
    122  vram_obj->tbo.resv);   in radeon_do_test_moves()
    126  vram_obj->tbo.resv);   in radeon_do_test_moves()
    173  vram_obj->tbo.resv);   in radeon_do_test_moves()
    177  vram_obj->tbo.resv);   in radeon_do_test_moves()
|
D | radeon_vm.c |
    144  list[0].tv.bo = &vm->page_directory->tbo;   in radeon_vm_get_bos()
    156  list[idx].tv.bo = &list[idx].robj->tbo;   in radeon_vm_get_bos()
    400  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);   in radeon_vm_clear_bo()
    706  radeon_sync_resv(rdev, &ib.sync, pd->tbo.resv, true);   in radeon_vm_update_page_directory()
    834  radeon_sync_resv(rdev, &ib->sync, pt->tbo.resv, true);   in radeon_vm_update_ptes()
    835  r = reservation_object_reserve_shared(pt->tbo.resv);   in radeon_vm_update_ptes()
    937  if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm))   in radeon_vm_bo_update()
|
D | radeon_trace.h |
    24   __entry->pages = bo->tbo.num_pages;
|
D | radeon_ttm.c |
    196  rbo = container_of(bo, struct radeon_bo, tbo);   in radeon_evict_flags()
    236  struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo);   in radeon_verify_access()
|
D | radeon_uvd.c |
    436  f = reservation_object_get_excl(bo->tbo.resv);   in radeon_uvd_cs_msg()
|
D | radeon_pm.c |
    152  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)   in radeon_unmap_vram_bos()
    153  ttm_bo_unmap_virtual(&bo->tbo);   in radeon_unmap_vram_bos()
|
D | radeon_display.c |
    506  work->fence = fence_get(reservation_object_get_excl(new_rbo->tbo.resv));   in radeon_crtc_page_flip()
|
D | radeon.h |
    491  struct ttm_buffer_object tbo;   member
|
/linux-4.1.27/drivers/gpu/drm/bochs/ |
D | bochs_mm.c |
    74   static void bochs_bo_ttm_destroy(struct ttm_buffer_object *tbo)   in bochs_bo_ttm_destroy() argument
    78   bo = container_of(tbo, struct bochs_bo, bo);   in bochs_bo_ttm_destroy()
    436  struct ttm_buffer_object *tbo;   in bochs_bo_unref() local
    441  tbo = &((*bo)->bo);   in bochs_bo_unref()
    442  ttm_bo_unref(&tbo);   in bochs_bo_unref()
|
/linux-4.1.27/drivers/gpu/drm/cirrus/ |
D | cirrus_main.c |
    266  struct ttm_buffer_object *tbo;   in cirrus_bo_unref() local
    271  tbo = &((*bo)->bo);   in cirrus_bo_unref()
    272  ttm_bo_unref(&tbo);   in cirrus_bo_unref()
|
D | cirrus_ttm.c |
    95   static void cirrus_bo_ttm_destroy(struct ttm_buffer_object *tbo)   in cirrus_bo_ttm_destroy() argument
    99   bo = container_of(tbo, struct cirrus_bo, bo);   in cirrus_bo_ttm_destroy()
|
/linux-4.1.27/drivers/gpu/drm/mgag200/ |
D | mgag200_main.c |
    318  struct ttm_buffer_object *tbo;   in mgag200_bo_unref() local
    323  tbo = &((*bo)->bo);   in mgag200_bo_unref()
    324  ttm_bo_unref(&tbo);   in mgag200_bo_unref()
|
D | mgag200_ttm.c |
    95   static void mgag200_bo_ttm_destroy(struct ttm_buffer_object *tbo)   in mgag200_bo_ttm_destroy() argument
    99   bo = container_of(tbo, struct mgag200_bo, bo);   in mgag200_bo_ttm_destroy()
|
/linux-4.1.27/drivers/gpu/drm/ast/ |
D | ast_main.c |
    545  struct ttm_buffer_object *tbo;   in ast_bo_unref() local
    550  tbo = &((*bo)->bo);   in ast_bo_unref()
    551  ttm_bo_unref(&tbo);   in ast_bo_unref()
|
D | ast_ttm.c |
    95   static void ast_bo_ttm_destroy(struct ttm_buffer_object *tbo)   in ast_bo_ttm_destroy() argument
    99   bo = container_of(tbo, struct ast_bo, bo);   in ast_bo_ttm_destroy()
|