/linux-4.4.14/drivers/gpu/drm/virtio/ |
D | virtgpu_object.c |
      28  static void virtio_gpu_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in virtio_gpu_ttm_bo_destroy() argument
      33  bo = container_of(tbo, struct virtio_gpu_object, tbo);  in virtio_gpu_ttm_bo_destroy()
      89  ret = ttm_bo_init(&vgdev->mman.bdev, &bo->tbo, size, type,  in virtio_gpu_object_create()
     110  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);  in virtio_gpu_object_kmap()
     123  struct page **pages = bo->tbo.ttm->pages;  in virtio_gpu_object_get_sg_table()
     124  int nr_pages = bo->tbo.num_pages;  in virtio_gpu_object_get_sg_table()
     130  if (bo->tbo.ttm->state == tt_unpopulated)  in virtio_gpu_object_get_sg_table()
     131  bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm);  in virtio_gpu_object_get_sg_table()
     158  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);  in virtio_gpu_object_wait()
     161  r = ttm_bo_wait(&bo->tbo, true, true, no_wait);  in virtio_gpu_object_wait()
     [all …]
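Every hit above follows one pattern: the driver embeds a struct ttm_buffer_object named tbo inside its own buffer-object wrapper, TTM callbacks receive the inner pointer, and the driver recovers its wrapper with container_of(). A minimal sketch of the destroy path, assuming a hypothetical mydrv_bo wrapper (the names are illustrative, not from the listing):

    #include <linux/kernel.h>        /* container_of() */
    #include <linux/slab.h>          /* kfree() */
    #include <drm/ttm/ttm_bo_api.h>  /* struct ttm_buffer_object */

    /* Hypothetical wrapper: TTM only ever sees the embedded tbo. */
    struct mydrv_bo {
            struct ttm_buffer_object tbo;   /* embedded, never a pointer */
            void *priv;                     /* driver-private state */
    };

    /* TTM calls this with the inner object; recover the wrapper. */
    static void mydrv_ttm_bo_destroy(struct ttm_buffer_object *tbo)
    {
            struct mydrv_bo *bo = container_of(tbo, struct mydrv_bo, tbo);

            /* release driver-private resources, then the wrapper */
            kfree(bo);
    }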
|
D | virtgpu_drv.h |
      63  struct ttm_buffer_object tbo;  member
     377  ttm_bo_reference(&bo->tbo);  in virtio_gpu_object_ref()
     383  struct ttm_buffer_object *tbo;  in virtio_gpu_object_unref() local
     387  tbo = &((*bo)->tbo);  in virtio_gpu_object_unref()
     388  ttm_bo_unref(&tbo);  in virtio_gpu_object_unref()
     389  if (tbo == NULL)  in virtio_gpu_object_unref()
     395  return drm_vma_node_offset_addr(&bo->tbo.vma_node);  in virtio_gpu_object_mmap_offset()
     403  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);  in virtio_gpu_object_reserve()
     417  ttm_bo_unreserve(&bo->tbo);  in virtio_gpu_object_unreserve()
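The local tbo variable in virtio_gpu_object_unref() exists because ttm_bo_unref() takes a struct ttm_buffer_object ** and clears it, while the caller holds a pointer to the wrapper type. A hedged reconstruction of that idiom, reusing the hypothetical mydrv_bo from the sketch above:

    /* Drop one reference and clear the caller's wrapper pointer. */
    static void mydrv_bo_unref(struct mydrv_bo **bo)
    {
            struct ttm_buffer_object *tbo;

            if ((*bo) == NULL)
                    return;
            tbo = &((*bo)->tbo);
            ttm_bo_unref(&tbo);     /* ttm_bo_unref() NULLs tbo */
            if (tbo == NULL)
                    *bo = NULL;     /* propagate to the caller */
    }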
|
D | virtgpu_ttm.c |
     382  static void virtio_gpu_bo_move_notify(struct ttm_buffer_object *tbo,  in virtio_gpu_bo_move_notify() argument
     388  bo = container_of(tbo, struct virtio_gpu_object, tbo);  in virtio_gpu_bo_move_notify()
     404  static void virtio_gpu_bo_swap_notify(struct ttm_buffer_object *tbo)  in virtio_gpu_bo_swap_notify() argument
     409  bo = container_of(tbo, struct virtio_gpu_object, tbo);  in virtio_gpu_bo_swap_notify()
|
D | virtgpu_ioctl.c |
      69  qobj = container_of(bo, struct virtio_gpu_object, tbo);  in virtio_gpu_object_list_validate()
      86  qobj = container_of(bo, struct virtio_gpu_object, tbo);  in virtio_gpu_unref_list()
     145  buflist[i].bo = &qobj->tbo;  in virtio_gpu_execbuffer()
     277  mainbuf.bo = &qobj->tbo;  in virtio_gpu_resource_create_ioctl()
     387  ret = ttm_bo_validate(&qobj->tbo, &qobj->placement,  in virtio_gpu_transfer_from_host_ioctl()
     397  reservation_object_add_excl_fence(qobj->tbo.resv,  in virtio_gpu_transfer_from_host_ioctl()
     431  ret = ttm_bo_validate(&qobj->tbo, &qobj->placement,  in virtio_gpu_transfer_to_host_ioctl()
     446  reservation_object_add_excl_fence(qobj->tbo.resv,  in virtio_gpu_transfer_to_host_ioctl()
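Both transfer ioctls pair ttm_bo_validate() (move the BO into a placement the transfer can use) with reservation_object_add_excl_fence() (publish the DMA as the exclusive fence, so later users of the BO wait on it). A condensed sketch of that sequence under the 4.4 API; the function shape and error handling are assumptions, not the driver verbatim:

    #include <drm/ttm/ttm_bo_api.h>
    #include <linux/reservation.h>

    static int mydrv_transfer(struct virtio_gpu_object *qobj,
                              struct fence *fence)
    {
            int ret;

            ret = ttm_bo_validate(&qobj->tbo, &qobj->placement,
                                  true /* interruptible */,
                                  false /* no_wait_gpu */);
            if (ret)
                    return ret;

            /* ...queue the host<->guest transfer that signals fence... */

            /* Anyone reserving this BO later waits for the transfer. */
            reservation_object_add_excl_fence(qobj->tbo.resv, fence);
            return 0;
    }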
|
D | virtgpu_display.c |
      95  reservation_object_add_excl_fence(qobj->tbo.resv,  in virtio_gpu_crtc_cursor_set()
|
/linux-4.4.14/drivers/gpu/drm/qxl/ |
D | qxl_object.c |
      30  static void qxl_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in qxl_ttm_bo_destroy() argument
      35  bo = container_of(tbo, struct qxl_bo, tbo);  in qxl_ttm_bo_destroy()
     111  r = ttm_bo_init(&qdev->mman.bdev, &bo->tbo, size, type,  in qxl_bo_create()
     135  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);  in qxl_bo_kmap()
     147  struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];  in qxl_bo_kmap_atomic_page()
     152  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)  in qxl_bo_kmap_atomic_page()
     154  else if (bo->tbo.mem.mem_type == TTM_PL_PRIV0)  in qxl_bo_kmap_atomic_page()
     160  ret = ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem);  in qxl_bo_kmap_atomic_page()
     163  return io_mapping_map_atomic_wc(map, bo->tbo.mem.bus.offset + page_offset);  in qxl_bo_kmap_atomic_page()
     189  struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];  in qxl_bo_kunmap_atomic_page()
     [all …]
|
D | qxl_object.h |
      34  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);  in qxl_bo_reserve()
      47  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_unreserve()
      52  return bo->tbo.offset;  in qxl_bo_gpu_offset()
      57  return bo->tbo.num_pages << PAGE_SHIFT;  in qxl_bo_size()
      62  return drm_vma_node_offset_addr(&bo->tbo.vma_node);  in qxl_bo_mmap_offset()
      70  r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);  in qxl_bo_wait()
      80  *mem_type = bo->tbo.mem.mem_type;  in qxl_bo_wait()
      82  r = ttm_bo_wait(&bo->tbo, true, true, no_wait);  in qxl_bo_wait()
      83  ttm_bo_unreserve(&bo->tbo);  in qxl_bo_wait()
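qxl_bo_wait() above shows the reserve/wait/unreserve discipline: ttm_bo_wait() may only be called while the BO is reserved, so the helper brackets it and snapshots the placement while it holds the reservation. A sketch of that shape (error handling condensed):

    /* Wait for the GPU to be idle on one BO; requires reservation. */
    static int mydrv_bo_wait(struct qxl_bo *bo, u32 *mem_type, bool no_wait)
    {
            int r;

            r = ttm_bo_reserve(&bo->tbo, true /* interruptible */,
                               no_wait, false /* no ww ticket */, NULL);
            if (unlikely(r != 0))
                    return r;
            if (mem_type)
                    *mem_type = bo->tbo.mem.mem_type;  /* snapshot placement */
            r = ttm_bo_wait(&bo->tbo, true /* lazy */,
                            true /* interruptible */, no_wait);
            ttm_bo_unreserve(&bo->tbo);
            return r;
    }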
|
D | qxl_gem.c |
      35  struct ttm_buffer_object *tbo;  in qxl_gem_object_free() local
      41  tbo = &qobj->tbo;  in qxl_gem_object_free()
      42  ttm_bo_unref(&tbo);  in qxl_gem_object_free()
|
D | qxl_release.c |
     219  if (entry->tv.bo == &bo->tbo)  in qxl_release_list_add()
     228  entry->tv.bo = &bo->tbo;  in qxl_release_list_add()
     240  ret = ttm_bo_validate(&bo->tbo, &bo->placement,  in qxl_release_validate_bo()
     246  ret = reservation_object_reserve_shared(bo->tbo.resv);  in qxl_release_validate_bo()
|
D | qxl_drv.h |
     107  struct ttm_buffer_object tbo;  member
     122  #define to_qxl_bo(tobj) container_of((tobj), struct qxl_bo, tbo)
     378  return slot->high_bits | (bo->tbo.offset + offset);  in qxl_bo_physical_address()
|
D | qxl_debugfs.c |
      65  fobj = rcu_dereference(bo->tbo.resv->fence);  in qxl_debugfs_buffers_info()
|
D | qxl_ttm.c |
     204  qbo = container_of(bo, struct qxl_bo, tbo);  in qxl_evict_flags()
     368  qbo = container_of(bo, struct qxl_bo, tbo);  in qxl_bo_move_notify()
|
D | qxl_cmd.c |
     520  …cmd->u.surface_create.data |= (new_mem->start << PAGE_SHIFT) + surf->tbo.bdev->man[new_mem->mem_ty…  in qxl_hw_surface_alloc()
     627  ret = ttm_bo_wait(&surf->tbo, true, true, !stall);  in qxl_reap_surf()
|
D | qxl_ioctl.c |
     329  ret = ttm_bo_validate(&qobj->tbo, &qobj->placement,  in qxl_update_area_ioctl()
|
/linux-4.4.14/drivers/gpu/drm/radeon/ |
D | radeon_object.c |
      54  u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;  in radeon_update_memory_usage()
      72  static void radeon_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in radeon_ttm_bo_destroy() argument
      76  bo = container_of(tbo, struct radeon_bo, tbo);  in radeon_ttm_bo_destroy()
      78  radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);  in radeon_ttm_bo_destroy()
     260  r = ttm_bo_init(&rdev->mman.bdev, &bo->tbo, size, type,  in radeon_bo_create()
     285  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);  in radeon_bo_kmap()
     311  ttm_bo_reference(&bo->tbo);  in radeon_bo_ref()
     317  struct ttm_buffer_object *tbo;  in radeon_bo_unref() local
     323  tbo = &((*bo)->tbo);  in radeon_bo_unref()
     324  ttm_bo_unref(&tbo);  in radeon_bo_unref()
     [all …]
|
D | radeon_prime.c |
      35  int npages = bo->tbo.num_pages;  in radeon_gem_prime_get_sg_table()
      37  return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);  in radeon_gem_prime_get_sg_table()
      45  ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,  in radeon_gem_prime_vmap()
     116  return bo->tbo.resv;  in radeon_gem_prime_res_obj()
     124  if (radeon_ttm_tt_has_userptr(bo->tbo.ttm))  in radeon_gem_prime_export()
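For PRIME export the drivers hand the TTM backing pages straight to drm_prime_pages_to_sg(); the virtio variant earlier additionally populates the TT first when it is still unpopulated. A minimal sketch of that export path under the 4.4 API (combining both details in one function is an assumption, not radeon_prime.c verbatim):

    #include <drm/drmP.h>               /* drm_prime_pages_to_sg() */
    #include <drm/ttm/ttm_bo_driver.h>  /* struct ttm_tt, tt_unpopulated */

    static struct sg_table *mydrv_prime_get_sg_table(struct radeon_bo *bo)
    {
            int npages = bo->tbo.num_pages;

            /* The TT pages must exist before they can be shared. */
            if (bo->tbo.ttm->state == tt_unpopulated)
                    bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm);

            return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
    }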
|
D | radeon_object.h |
      68  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, NULL);  in radeon_bo_reserve()
      79  ttm_bo_unreserve(&bo->tbo);  in radeon_bo_unreserve()
      93  return bo->tbo.offset;  in radeon_bo_gpu_offset()
      98  return bo->tbo.num_pages << PAGE_SHIFT;  in radeon_bo_size()
     103  return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_ngpu_pages()
     108  return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;  in radeon_bo_gpu_page_alignment()
     119  return drm_vma_node_offset_addr(&bo->tbo.vma_node);  in radeon_bo_mmap_offset()
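Every helper above derives its answer from the embedded tbo alone: byte size is num_pages << PAGE_SHIFT, GPU-page counts rescale that by RADEON_GPU_PAGE_SIZE, and the mmap cookie handed to userspace comes from the BO's vma_node. A sketch of the arithmetic (with 4 KiB CPU and GPU pages the two page counts coincide):

    static inline unsigned long mydrv_bo_size(struct radeon_bo *bo)
    {
            return bo->tbo.num_pages << PAGE_SHIFT;          /* pages -> bytes */
    }

    static inline unsigned mydrv_bo_ngpu_pages(struct radeon_bo *bo)
    {
            return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
    }

    static inline u64 mydrv_bo_mmap_offset(struct radeon_bo *bo)
    {
            /* userspace passes this offset to mmap() on the DRM fd */
            return drm_vma_node_offset_addr(&bo->tbo.vma_node);
    }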
|
D | radeon_gem.c |
      38  drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);  in radeon_gem_object_free()
     114  r = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, 30 * HZ);  in radeon_gem_set_domain()
     323  r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);  in radeon_gem_userptr_ioctl()
     342  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in radeon_gem_userptr_ioctl()
     412  if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) {  in radeon_mode_dumb_mmap()
     444  r = reservation_object_test_signaled_rcu(robj->tbo.resv, true);  in radeon_gem_busy_ioctl()
     450  cur_placement = ACCESS_ONCE(robj->tbo.mem.mem_type);  in radeon_gem_busy_ioctl()
     473  ret = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, 30 * HZ);  in radeon_gem_wait_idle_ioctl()
     480  cur_placement = ACCESS_ONCE(robj->tbo.mem.mem_type);  in radeon_gem_wait_idle_ioctl()
     551  tv.bo = &bo_va->bo->tbo;  in radeon_gem_va_update_vm()
     [all …]
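The busy and wait-idle ioctls query the reservation object locklessly: reservation_object_test_signaled_rcu() polls, reservation_object_wait_timeout_rcu() blocks with a timeout, and both pass wait_all = true so shared fences count as busy too. A sketch of the wait-idle core with the 30-second timeout from the listing (the return-code mapping is assumed from context):

    #include <linux/reservation.h>

    static int mydrv_gem_wait_idle(struct radeon_bo *robj)
    {
            long ret;

            ret = reservation_object_wait_timeout_rcu(robj->tbo.resv,
                                                      true /* wait_all */,
                                                      true /* interruptible */,
                                                      30 * HZ);
            if (ret == 0)
                    return -EBUSY;  /* timed out, still busy */
            if (ret < 0)
                    return ret;     /* interrupted or error */
            return 0;
    }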
|
D | radeon_mn.c |
     145  if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound)  in radeon_mn_invalidate_range_start()
     154  r = reservation_object_wait_timeout_rcu(bo->tbo.resv,  in radeon_mn_invalidate_range_start()
     160  r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);  in radeon_mn_invalidate_range_start()
|
D | radeon_cs.c |
     154  if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) {  in radeon_cs_parser_relocs()
     167  p->relocs[i].tv.bo = &p->relocs[i].robj->tbo;  in radeon_cs_parser_relocs()
     241  resv = reloc->robj->tbo.resv;  in radeon_cs_sync_rings()
     385  return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;  in cmp_size_smaller_first()
     499  &rdev->ring_tmp_bo.bo->tbo.mem);  in radeon_bo_vm_update_pte()
     513  r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem);  in radeon_bo_vm_update_pte()
|
D | radeon_benchmark.c |
     125  dobj->tbo.resv);  in radeon_benchmark_move()
     136  dobj->tbo.resv);  in radeon_benchmark_move()
|
D | radeon_test.c |
     122  vram_obj->tbo.resv);  in radeon_do_test_moves()
     126  vram_obj->tbo.resv);  in radeon_do_test_moves()
     173  vram_obj->tbo.resv);  in radeon_do_test_moves()
     177  vram_obj->tbo.resv);  in radeon_do_test_moves()
|
D | radeon_vm.c |
     144  list[0].tv.bo = &vm->page_directory->tbo;  in radeon_vm_get_bos()
     156  list[idx].tv.bo = &list[idx].robj->tbo;  in radeon_vm_get_bos()
     399  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in radeon_vm_clear_bo()
     703  radeon_sync_resv(rdev, &ib.sync, pd->tbo.resv, true);  in radeon_vm_update_page_directory()
     831  radeon_sync_resv(rdev, &ib->sync, pt->tbo.resv, true);  in radeon_vm_update_ptes()
     832  r = reservation_object_reserve_shared(pt->tbo.resv);  in radeon_vm_update_ptes()
     943  if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm))  in radeon_vm_bo_update()
|
D | radeon_trace.h |
      24  __entry->pages = bo->tbo.num_pages;
|
D | radeon_ttm.c |
     196  rbo = container_of(bo, struct radeon_bo, tbo);  in radeon_evict_flags()
     236  struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo);  in radeon_verify_access()
|
D | radeon_uvd.c |
     436  f = reservation_object_get_excl(bo->tbo.resv);  in radeon_uvd_cs_msg()
|
D | radeon_pm.c |
     152  if (bo->tbo.mem.mem_type == TTM_PL_VRAM)  in radeon_unmap_vram_bos()
     153  ttm_bo_unmap_virtual(&bo->tbo);  in radeon_unmap_vram_bos()
|
D | radeon_display.c |
     559  work->fence = fence_get(reservation_object_get_excl(new_rbo->tbo.resv));  in radeon_crtc_page_flip()
|
D | radeon.h |
     490  struct ttm_buffer_object tbo;  member
|
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_prime.c |
      35  int npages = bo->tbo.num_pages;  in amdgpu_gem_prime_get_sg_table()
      37  return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);  in amdgpu_gem_prime_get_sg_table()
      45  ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,  in amdgpu_gem_prime_vmap()
     115  return bo->tbo.resv;  in amdgpu_gem_prime_res_obj()
     124  if (amdgpu_ttm_tt_has_userptr(bo->tbo.ttm))  in amdgpu_gem_prime_export()
|
D | amdgpu_object.c |
      92  static void amdgpu_ttm_bo_destroy(struct ttm_buffer_object *tbo)  in amdgpu_ttm_bo_destroy() argument
      96  bo = container_of(tbo, struct amdgpu_bo, tbo);  in amdgpu_ttm_bo_destroy()
      98  amdgpu_update_memory_usage(bo->adev, &bo->tbo.mem, NULL);  in amdgpu_ttm_bo_destroy()
     274  r = ttm_bo_init(&adev->mman.bdev, &bo->tbo, size, type,  in amdgpu_bo_create_restricted()
     322  r = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, &bo->kmap);  in amdgpu_bo_kmap()
     346  ttm_bo_reference(&bo->tbo);  in amdgpu_bo_ref()
     352  struct ttm_buffer_object *tbo;  in amdgpu_bo_unref() local
     357  tbo = &((*bo)->tbo);  in amdgpu_bo_unref()
     358  ttm_bo_unref(&tbo);  in amdgpu_bo_unref()
     359  if (tbo == NULL)  in amdgpu_bo_unref()
     [all …]
|
D | amdgpu_object.h |
      74  r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, 0);  in amdgpu_bo_reserve()
      85  ttm_bo_unreserve(&bo->tbo);  in amdgpu_bo_unreserve()
      99  return bo->tbo.offset;  in amdgpu_bo_gpu_offset()
     104  return bo->tbo.num_pages << PAGE_SHIFT;  in amdgpu_bo_size()
     109  return (bo->tbo.num_pages << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_ngpu_pages()
     114  return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;  in amdgpu_bo_gpu_page_alignment()
     125  return drm_vma_node_offset_addr(&bo->tbo.vma_node);  in amdgpu_bo_mmap_offset()
|
D | amdgpu_gem.c |
      39  drm_prime_gem_destroy(&robj->gem_base, robj->tbo.sg);  in amdgpu_gem_object_free()
     255  r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags);  in amdgpu_gem_userptr_ioctl()
     274  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in amdgpu_gem_userptr_ioctl()
     311  if (amdgpu_ttm_tt_has_userptr(robj->tbo.ttm) ||  in amdgpu_mode_dumb_mmap()
     376  ret = reservation_object_test_signaled_rcu(robj->tbo.resv, true);  in amdgpu_gem_wait_idle_ioctl()
     378  ret = reservation_object_wait_timeout_rcu(robj->tbo.resv, true, true, timeout);  in amdgpu_gem_wait_idle_ioctl()
     460  tv.bo = &bo_va->bo->tbo;  in amdgpu_gem_va_update_vm()
     497  r = amdgpu_vm_bo_update(adev, bo_va, &bo_va->bo->tbo.mem);  in amdgpu_gem_va_update_vm()
     561  tv.bo = &rbo->tbo;  in amdgpu_gem_va_ioctl()
     566  tv_pd.bo = &fpriv->vm.page_directory->tbo;  in amdgpu_gem_va_ioctl()
     [all …]
|
D | amdgpu_mn.c |
     145  if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start,  in amdgpu_mn_invalidate_range_start()
     155  r = reservation_object_wait_timeout_rcu(bo->tbo.resv,  in amdgpu_mn_invalidate_range_start()
     161  r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);  in amdgpu_mn_invalidate_range_start()
|
D | amdgpu_cs.c |
     145  if (amdgpu_ttm_tt_has_userptr(p->uf.bo->tbo.ttm)) {  in amdgpu_cs_user_fence_chunk()
     154  p->uf_entry.tv.bo = &p->uf_entry.robj->tbo;  in amdgpu_cs_user_fence_chunk()
     354  amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);  in amdgpu_cs_list_validate()
     374  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in amdgpu_cs_list_validate()
     446  struct reservation_object *resv = e->robj->tbo.resv;  in amdgpu_cs_sync_rings()
     462  return (int)la->robj->tbo.num_pages - (int)lb->robj->tbo.num_pages;  in cmp_size_smaller_first()
     549  r = amdgpu_vm_bo_update(adev, bo_va, &bo->tbo.mem);  in amdgpu_bo_vm_update_pte()
|
D | amdgpu_vm.c |
     104  list[0].tv.bo = &vm->page_directory->tbo;  in amdgpu_vm_get_bos()
     116  list[idx].tv.bo = &list[idx].robj->tbo;  in amdgpu_vm_get_bos()
     348  r = reservation_object_reserve_shared(bo->tbo.resv);  in amdgpu_vm_clear_bo()
     352  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in amdgpu_vm_clear_bo()
     499  amdgpu_sync_resv(adev, &ib->sync, pd->tbo.resv, AMDGPU_FENCE_OWNER_VM);  in amdgpu_vm_update_page_directory()
     646  amdgpu_sync_resv(adev, &ib->sync, pt->tbo.resv, owner);  in amdgpu_vm_update_ptes()
     647  r = reservation_object_reserve_shared(pt->tbo.resv);  in amdgpu_vm_update_ptes()
     839  flags = amdgpu_ttm_tt_pte_flags(adev, bo_va->bo->tbo.ttm, mem);  in amdgpu_vm_bo_update()
    1072  struct reservation_object *resv = vm->page_directory->tbo.resv;  in amdgpu_vm_bo_map()
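Before rewriting a page table, the VM code orders the update against everything already pending on that table's reservation object and pre-allocates a shared-fence slot so the finished job's fence can be added without failing later. A condensed sketch of that preparation step (the ib/job plumbing is assumed context, not the full amdgpu_vm.c flow):

    static int mydrv_prepare_pt_update(struct amdgpu_device *adev,
                                       struct amdgpu_ib *ib,
                                       struct amdgpu_bo *pt, void *owner)
    {
            int r;

            /* Pipeline behind prior users of this page table. */
            amdgpu_sync_resv(adev, &ib->sync, pt->tbo.resv, owner);

            /* Reserve a shared-fence slot up front; adding the job's
             * fence in the commit path then cannot fail. */
            r = reservation_object_reserve_shared(pt->tbo.resv);
            if (r)
                    return r;
            return 0;
    }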
|
D | amdgpu_bo_list.c |
     114  if (amdgpu_ttm_tt_has_userptr(entry->robj->tbo.ttm)) {  in amdgpu_bo_list_set()
     119  entry->tv.bo = &entry->robj->tbo;  in amdgpu_bo_list_set()
|
D | amdgpu_uvd.c |
     363  r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);  in amdgpu_uvd_cs_pass1()
     533  r = reservation_object_wait_timeout_rcu(bo->tbo.resv, true, false,  in amdgpu_uvd_cs_msg()
     853  tv.bo = &bo->tbo;  in amdgpu_uvd_send_msg()
     867  r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false);  in amdgpu_uvd_send_msg()
|
D | amdgpu_trace.h |
      24  __entry->pages = bo->tbo.num_pages;
|
D | amdgpu_ttm.c |
     191  rbo = container_of(bo, struct amdgpu_bo, tbo);  in amdgpu_evict_flags()
     208  struct amdgpu_bo *rbo = container_of(bo, struct amdgpu_bo, tbo);  in amdgpu_verify_access()
|
D | amdgpu_display.c |
     222  r = reservation_object_get_fences_rcu(new_rbo->tbo.resv, &work->excl,  in amdgpu_crtc_page_flip()
|
D | amdgpu.h |
     526  struct ttm_buffer_object tbo;  member
|
/linux-4.4.14/drivers/gpu/drm/bochs/ |
D | bochs_mm.c |
      74  static void bochs_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in bochs_bo_ttm_destroy() argument
      78  bo = container_of(tbo, struct bochs_bo, bo);  in bochs_bo_ttm_destroy()
     436  struct ttm_buffer_object *tbo;  in bochs_bo_unref() local
     441  tbo = &((*bo)->bo);  in bochs_bo_unref()
     442  ttm_bo_unref(&tbo);  in bochs_bo_unref()
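Note the small-driver variation here: bochs (and cirrus, mgag200, ast below) name the embedded member bo rather than tbo, so only the third container_of() argument changes while the pattern stays identical:

    /* Same embed-and-recover pattern; the member is called bo. */
    struct small_bo {                       /* hypothetical stand-in */
            struct ttm_buffer_object bo;
            /* driver state ... */
    };

    static void small_bo_ttm_destroy(struct ttm_buffer_object *tbo)
    {
            struct small_bo *b = container_of(tbo, struct small_bo, bo);
            kfree(b);
    }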
|
/linux-4.4.14/drivers/gpu/drm/cirrus/ |
D | cirrus_main.c |
     266  struct ttm_buffer_object *tbo;  in cirrus_bo_unref() local
     271  tbo = &((*bo)->bo);  in cirrus_bo_unref()
     272  ttm_bo_unref(&tbo);  in cirrus_bo_unref()
|
D | cirrus_ttm.c |
      95  static void cirrus_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in cirrus_bo_ttm_destroy() argument
      99  bo = container_of(tbo, struct cirrus_bo, bo);  in cirrus_bo_ttm_destroy()
|
/linux-4.4.14/drivers/gpu/drm/mgag200/ |
D | mgag200_main.c |
     329  struct ttm_buffer_object *tbo;  in mgag200_bo_unref() local
     334  tbo = &((*bo)->bo);  in mgag200_bo_unref()
     335  ttm_bo_unref(&tbo);  in mgag200_bo_unref()
|
D | mgag200_ttm.c |
      95  static void mgag200_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in mgag200_bo_ttm_destroy() argument
      99  bo = container_of(tbo, struct mgag200_bo, bo);  in mgag200_bo_ttm_destroy()
|
/linux-4.4.14/drivers/gpu/drm/ast/ |
D | ast_main.c |
     546  struct ttm_buffer_object *tbo;  in ast_bo_unref() local
     551  tbo = &((*bo)->bo);  in ast_bo_unref()
     552  ttm_bo_unref(&tbo);  in ast_bo_unref()
|
D | ast_ttm.c |
      95  static void ast_bo_ttm_destroy(struct ttm_buffer_object *tbo)  in ast_bo_ttm_destroy() argument
      99  bo = container_of(tbo, struct ast_bo, bo);  in ast_bo_ttm_destroy()
|