Lines matching refs:bo — cross-reference hits for the identifier 'bo' in the vmwgfx driver's resource code. Each hit shows the source line number, the matching code, and the enclosing function; "argument" and "local" mark the lines where bo is declared.
74 vmw_dma_buffer(struct ttm_buffer_object *bo) in vmw_dma_buffer() argument
76 return container_of(bo, struct vmw_dma_buffer, base); in vmw_dma_buffer()
80 vmw_user_dma_buffer(struct ttm_buffer_object *bo) in vmw_user_dma_buffer() argument
82 struct vmw_dma_buffer *vmw_bo = vmw_dma_buffer(bo); in vmw_user_dma_buffer()
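The two converters above recover the driver-private wrapper from its embedded TTM object. A minimal sketch of the same container_of() downcast, using a hypothetical struct my_bo in place of the vmw_dma_buffer types:

    #include <linux/kernel.h>          /* container_of() */
    #include <linux/types.h>
    #include <drm/ttm/ttm_bo_api.h>    /* struct ttm_buffer_object */

    struct my_bo {
            struct ttm_buffer_object base;  /* embedded, not a pointer */
            u32 extra_state;                /* driver-private data */
    };

    /* Valid only for objects that really wrap a struct my_bo. */
    static inline struct my_bo *to_my_bo(struct ttm_buffer_object *bo)
    {
            return container_of(bo, struct my_bo, base);
    }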
130 struct ttm_buffer_object *bo = &res->backup->base; in vmw_resource_release() local
132 ttm_bo_reserve(bo, false, false, false, NULL); in vmw_resource_release()
137 val_buf.bo = bo; in vmw_resource_release()
143 ttm_bo_unreserve(bo); in vmw_resource_release()
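vmw_resource_release() brackets its work on the backup buffer with a reserve/unreserve pair. A sketch of that locking pattern, assuming the old five-argument ttm_bo_reserve() seen above:

    static void touch_bo_locked(struct ttm_buffer_object *bo)
    {
            int ret;

            /* interruptible = false, no_wait = false, use_ticket = false */
            ret = ttm_bo_reserve(bo, false, false, false, NULL);
            if (ret)
                    return;

            /* ... inspect bo->mem, attach fences, etc., while reserved ... */

            ttm_bo_unreserve(bo);
    }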
396 void vmw_dmabuf_bo_free(struct ttm_buffer_object *bo) in vmw_dmabuf_bo_free() argument
398 struct vmw_dma_buffer *vmw_bo = vmw_dma_buffer(bo); in vmw_dmabuf_bo_free()
403 static void vmw_user_dmabuf_destroy(struct ttm_buffer_object *bo) in vmw_user_dmabuf_destroy() argument
405 struct vmw_user_dma_buffer *vmw_user_bo = vmw_user_dma_buffer(bo); in vmw_user_dmabuf_destroy()
414 void (*bo_free) (struct ttm_buffer_object *bo)) in vmw_dmabuf_init()
439 struct ttm_buffer_object *bo; in vmw_user_dmabuf_release() local
448 bo = &vmw_user_bo->dma.base; in vmw_user_dmabuf_release()
449 ttm_bo_unref(&bo); in vmw_user_dmabuf_release()
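vmw_user_dmabuf_release() drops its reference through ttm_bo_unref(), which also clears the caller's pointer. A sketch of the old reference-counting helpers used throughout this listing:

    static void hold_and_drop(struct ttm_buffer_object *bo)
    {
            /* ttm_bo_reference() returns its argument with a reference taken. */
            struct ttm_buffer_object *ref = ttm_bo_reference(bo);

            /* ... use the object while the reference is held ... */

            ttm_bo_unref(&ref);     /* drops the ref and sets ref to NULL */
    }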
536 int vmw_user_dmabuf_verify_access(struct ttm_buffer_object *bo, in vmw_user_dmabuf_verify_access() argument
541 if (unlikely(bo->destroy != vmw_user_dmabuf_destroy)) in vmw_user_dmabuf_verify_access()
544 vmw_user_bo = vmw_user_dma_buffer(bo); in vmw_user_dmabuf_verify_access()
569 struct ttm_buffer_object *bo = &user_bo->dma.base; in vmw_user_dmabuf_synccpu_grab() local
578 return reservation_object_test_signaled_rcu(bo->resv, true) ? 0 : -EBUSY; in vmw_user_dmabuf_synccpu_grab()
580 lret = reservation_object_wait_timeout_rcu(bo->resv, true, true, MAX_SCHEDULE_TIMEOUT); in vmw_user_dmabuf_synccpu_grab()
589 (bo, !!(flags & drm_vmw_synccpu_dontblock)); in vmw_user_dmabuf_synccpu_grab()
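The synccpu_grab lines poll or wait on the buffer's reservation object before granting CPU access. A sketch of that non-blocking vs. blocking wait, using the pre-dma_resv API that appears above:

    #include <linux/errno.h>
    #include <linux/reservation.h>
    #include <linux/sched.h>            /* MAX_SCHEDULE_TIMEOUT */

    static int wait_bo_idle(struct ttm_buffer_object *bo, bool nonblock)
    {
            long lret;

            if (nonblock)
                    return reservation_object_test_signaled_rcu(bo->resv, true) ?
                            0 : -EBUSY;

            lret = reservation_object_wait_timeout_rcu(bo->resv, true, true,
                                                       MAX_SCHEDULE_TIMEOUT);
            if (lret == 0)
                    return -EBUSY;          /* timed out */
            if (lret < 0)
                    return (int)lret;       /* interrupted */
            return 0;
    }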
1136 val_buf->bo != NULL) || in vmw_resource_do_validate()
1137 (!func->needs_backup && val_buf->bo != NULL))) { in vmw_resource_do_validate()
1238 val_buf->bo = ttm_bo_reference(&res->backup->base); in vmw_resource_check_buffer()
1261 ttm_bo_unref(&val_buf->bo); in vmw_resource_check_buffer()
1313 if (likely(val_buf->bo == NULL)) in vmw_resource_backoff_reservation()
1319 ttm_bo_unref(&val_buf->bo); in vmw_resource_backoff_reservation()
1337 val_buf.bo = NULL; in vmw_resource_do_evict()
1383 val_buf.bo = NULL; in vmw_resource_validate()
1386 val_buf.bo = &res->backup->base; in vmw_resource_validate()
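The validation paths above hand the backup buffer around in a struct ttm_validate_buffer, taking a reference when val_buf->bo is set and dropping it again in the backoff path. A minimal, hypothetical setup of a one-entry validation list in the same style (the shared flag is an assumption about this TTM version):

    #include <linux/list.h>
    #include <drm/ttm/ttm_execbuf_util.h>   /* struct ttm_validate_buffer */

    static void init_val_buf(struct ttm_validate_buffer *val_buf,
                             struct ttm_buffer_object *bo)
    {
            INIT_LIST_HEAD(&val_buf->head);
            val_buf->bo = ttm_bo_reference(bo);     /* list holds its own ref */
            val_buf->shared = false;                /* use the exclusive fence slot */
    }

    static void fini_val_buf(struct ttm_validate_buffer *val_buf)
    {
            ttm_bo_unref(&val_buf->bo);             /* mirrors the backoff path */
    }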
1448 void vmw_fence_single_bo(struct ttm_buffer_object *bo, in vmw_fence_single_bo() argument
1451 struct ttm_bo_device *bdev = bo->bdev; in vmw_fence_single_bo()
1458 reservation_object_add_excl_fence(bo->resv, &fence->base); in vmw_fence_single_bo()
1461 reservation_object_add_excl_fence(bo->resv, &fence->base); in vmw_fence_single_bo()
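vmw_fence_single_bo() publishes a fence as the buffer's exclusive fence, so later waits on bo->resv block until the GPU work completes. A sketch with a plain struct fence (the pre-4.10 name for dma_fence), assuming the BO is already reserved:

    #include <linux/fence.h>

    static void fence_bo_excl(struct ttm_buffer_object *bo, struct fence *fence)
    {
            /* Caller must hold the BO's reservation (ttm_bo_reserve()). */
            reservation_object_add_excl_fence(bo->resv, fence);
    }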
1483 void vmw_resource_move_notify(struct ttm_buffer_object *bo, in vmw_resource_move_notify() argument
1491 if (bo->destroy != vmw_dmabuf_bo_free && in vmw_resource_move_notify()
1492 bo->destroy != vmw_user_dmabuf_destroy) in vmw_resource_move_notify()
1495 dma_buf = container_of(bo, struct vmw_dma_buffer, base); in vmw_resource_move_notify()
1501 val_buf.bo = bo; in vmw_resource_move_notify()
1515 (void) ttm_bo_wait(bo, false, false, false); in vmw_resource_move_notify()
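vmw_resource_move_notify() bails out early unless bo->destroy points at one of the driver's own destructors, i.e. the destroy callback doubles as a type tag before the container_of() downcast. A hypothetical version of that check, reusing the struct my_bo sketch from earlier:

    #include <linux/slab.h>

    /* Destructor handed to ttm_bo_init() for this driver's BOs. */
    static void my_bo_free(struct ttm_buffer_object *bo)
    {
            kfree(container_of(bo, struct my_bo, base));
    }

    /* Foreign TTM objects carry a different destroy hook, so this filters
     * them out before any downcast or driver-specific bookkeeping. */
    static bool is_my_bo(struct ttm_buffer_object *bo)
    {
            return bo->destroy == my_bo_free;
    }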
1576 void vmw_query_move_notify(struct ttm_buffer_object *bo, in vmw_query_move_notify() argument
1580 struct ttm_bo_device *bdev = bo->bdev; in vmw_query_move_notify()
1588 dx_query_mob = container_of(bo, struct vmw_dma_buffer, base); in vmw_query_move_notify()
1595 if (mem->mem_type == TTM_PL_SYSTEM && bo->mem.mem_type == VMW_PL_MOB) { in vmw_query_move_notify()
1603 vmw_fence_single_bo(bo, fence); in vmw_query_move_notify()
1608 (void) ttm_bo_wait(bo, false, false, false); in vmw_query_move_notify()
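Both move_notify paths above finish by synchronously waiting out any remaining fences with the old four-argument ttm_bo_wait(). A sketch of that final wait, assuming the caller holds the BO's reservation:

    static void drain_bo(struct ttm_buffer_object *bo)
    {
            /* lazy = false, interruptible = false, no_wait = false */
            (void) ttm_bo_wait(bo, false, false, false);
    }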