Lines Matching refs:backup

127 if (res->backup) { in vmw_resource_release()
128 struct ttm_buffer_object *bo = &res->backup->base; in vmw_resource_release()
142 vmw_dmabuf_unreference(&res->backup); in vmw_resource_release()
230 res->backup = NULL; in vmw_resource_init()
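Lines 127-142 and 230 bracket the lifetime of the backup pointer: vmw_resource_init() starts every resource with res->backup == NULL, and vmw_resource_release() drops the resource's reference once it is done with the buffer. The following is a minimal, self-contained model of that ownership rule in plain C; the model_* names are hypothetical stand-ins, not the driver's own types.

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins for vmw_dma_buffer and vmw_resource. */
struct model_buf { int refcount; };
struct model_res { struct model_buf *backup; };

static void model_buf_unreference(struct model_buf **buf)
{
	/* vmw_dmabuf_unreference() analogue: drop one reference, clear the pointer. */
	if (*buf && --(*buf)->refcount == 0)
		free(*buf);
	*buf = NULL;
}

static void model_res_init(struct model_res *res)
{
	res->backup = NULL;                 /* line 230: a fresh resource owns no backup */
}

static void model_res_release(struct model_res *res)
{
	if (res->backup)                    /* line 127: only backed resources take this path */
		model_buf_unreference(&res->backup);  /* line 142: drop the resource's reference */
}

int main(void)
{
	struct model_res res;

	model_res_init(&res);
	res.backup = calloc(1, sizeof(*res.backup));
	if (!res.backup)
		return 1;
	res.backup->refcount = 1;
	model_res_release(&res);
	assert(res.backup == NULL);
	printf("backup dropped at release\n");
	return 0;
}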
1088 struct vmw_dma_buffer *backup; in vmw_resource_buf_alloc() local
1091 if (likely(res->backup)) { in vmw_resource_buf_alloc()
1092 BUG_ON(res->backup->base.num_pages * PAGE_SIZE < size); in vmw_resource_buf_alloc()
1096 backup = kzalloc(sizeof(*backup), GFP_KERNEL); in vmw_resource_buf_alloc()
1097 if (unlikely(backup == NULL)) in vmw_resource_buf_alloc()
1100 ret = vmw_dmabuf_init(res->dev_priv, backup, res->backup_size, in vmw_resource_buf_alloc()
1107 res->backup = backup; in vmw_resource_buf_alloc()
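vmw_resource_buf_alloc() (lines 1088-1107) only creates a backup buffer when the resource does not already have one, and sanity-checks that an existing buffer is large enough. A rough, self-contained sketch of that lazy-allocation pattern, again with hypothetical model_* names and simplified error handling:

#include <assert.h>
#include <errno.h>
#include <stdlib.h>

#define MODEL_PAGE_SIZE 4096UL

/* Hypothetical stand-ins for the driver structures. */
struct model_buf { size_t num_pages; int refcount; };
struct model_res { struct model_buf *backup; size_t backup_size; };

static int model_res_buf_alloc(struct model_res *res)
{
	struct model_buf *backup;

	if (res->backup) {
		/* Line 1092: BUG_ON() in the driver; an existing backup must
		 * already be large enough for the resource. */
		assert(res->backup->num_pages * MODEL_PAGE_SIZE >= res->backup_size);
		return 0;
	}

	backup = calloc(1, sizeof(*backup));       /* line 1096: kzalloc() in the driver */
	if (!backup)                               /* line 1097 */
		return -ENOMEM;

	backup->refcount = 1;
	backup->num_pages =
		(res->backup_size + MODEL_PAGE_SIZE - 1) / MODEL_PAGE_SIZE;
	res->backup = backup;                      /* line 1107: resource now owns the buffer */
	return 0;
}

Line 1284 suggests vmw_resource_reserve() gates this allocation on res->func->needs_backup and res->backup == NULL, so resource types that never need a backup buffer never allocate one.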
1144 list_add_tail(&res->mob_head, &res->backup->res_list); in vmw_resource_do_validate()
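Line 1144 shows the bookkeeping side of validation: the resource is added to its backup buffer's res_list, presumably so the buffer can later find every resource it backs. A toy model of that linkage, with hypothetical names and a hand-rolled singly linked list standing in for struct list_head:

struct model_res;

struct model_buf {
	struct model_res *res_list;   /* resources currently backed by this buffer */
};

struct model_res {
	struct model_buf *backup;
	struct model_res *mob_next;   /* link on the backup's res_list */
};

static void model_res_link_backup(struct model_res *res)
{
	/* Line 1144: once validated, the resource is put on its backup
	 * buffer's list (head insertion here; list_add_tail() in the driver). */
	res->mob_next = res->backup->res_list;
	res->backup->res_list = res;
}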
1184 if (new_backup && new_backup != res->backup) { in vmw_resource_unreserve()
1186 if (res->backup) { in vmw_resource_unreserve()
1187 lockdep_assert_held(&res->backup->base.resv->lock.base); in vmw_resource_unreserve()
1189 vmw_dmabuf_unreference(&res->backup); in vmw_resource_unreserve()
1192 res->backup = vmw_dmabuf_reference(new_backup); in vmw_resource_unreserve()
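vmw_resource_unreserve() (lines 1184-1192) swaps in a new backup buffer: when new_backup differs from the current one, the old reference is dropped (with the buffer's reservation lock held, per the lockdep assertion on line 1187) and a reference to the new buffer is taken. A simplified, self-contained model of that swap, with hypothetical names and the locking reduced to a comment:

#include <stdlib.h>

struct model_buf { int refcount; };
struct model_res { struct model_buf *backup; };

static struct model_buf *model_buf_reference(struct model_buf *buf)
{
	buf->refcount++;                           /* vmw_dmabuf_reference() analogue */
	return buf;
}

static void model_buf_unreference(struct model_buf **buf)
{
	if (*buf && --(*buf)->refcount == 0)       /* vmw_dmabuf_unreference() analogue */
		free(*buf);
	*buf = NULL;
}

static void model_res_unreserve(struct model_res *res, struct model_buf *new_backup)
{
	if (new_backup && new_backup != res->backup) {        /* line 1184 */
		if (res->backup) {
			/* Line 1187: the driver asserts that the old buffer's
			 * reservation lock is held before letting go of it. */
			model_buf_unreference(&res->backup);  /* line 1189 */
		}
		res->backup = model_buf_reference(new_backup); /* line 1192 */
	}
}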
1228 if (unlikely(res->backup == NULL)) { in vmw_resource_check_buffer()
1235 val_buf->bo = ttm_bo_reference(&res->backup->base); in vmw_resource_check_buffer()
1246 ret = ttm_bo_validate(&res->backup->base, in vmw_resource_check_buffer()
1260 vmw_dmabuf_unreference(&res->backup); in vmw_resource_check_buffer()
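vmw_resource_check_buffer() (lines 1228-1260) allocates a backup on demand, takes an extra reference on the buffer for the validation entry (line 1235), validates its placement (line 1246), and drops the backup again in its error path (line 1260). A compact model of that flow follows; the model_* helpers are hypothetical, the placement check is stubbed out, and the real function does more (it also reserves the buffer and keys its error handling on driver state not visible in this listing).

#include <errno.h>
#include <stdbool.h>
#include <stdlib.h>

struct model_buf { int refcount; };
struct model_res { struct model_buf *backup; };
struct model_val_buf { struct model_buf *bo; };   /* stand-in for ttm_validate_buffer */

static void model_buf_unreference(struct model_buf **buf)
{
	if (*buf && --(*buf)->refcount == 0)
		free(*buf);
	*buf = NULL;
}

/* Stub for the placement validation done by ttm_bo_validate(). */
static int model_validate_placement(struct model_buf *buf)
{
	(void)buf;
	return 0;
}

static int model_res_check_buffer(struct model_res *res, struct model_val_buf *val_buf)
{
	bool fresh = false;
	int ret;

	if (!res->backup) {                        /* line 1228: allocate on demand */
		res->backup = calloc(1, sizeof(*res->backup));
		if (!res->backup)
			return -ENOMEM;
		res->backup->refcount = 1;
		fresh = true;
	}

	res->backup->refcount++;                   /* line 1235: extra reference for val_buf */
	val_buf->bo = res->backup;

	ret = model_validate_placement(res->backup);   /* line 1246 */
	if (ret) {
		model_buf_unreference(&val_buf->bo);
		if (fresh)                             /* simplification of the driver's condition */
			model_buf_unreference(&res->backup);  /* line 1260: error path drops the backup */
	}
	return ret;
}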
1284 if (res->func->needs_backup && res->backup == NULL && in vmw_resource_reserve()
1377 if (res->backup) in vmw_resource_validate()
1378 val_buf.bo = &res->backup->base; in vmw_resource_validate()
1417 else if (!res->func->needs_backup && res->backup) { in vmw_resource_validate()
1419 vmw_dmabuf_unreference(&res->backup); in vmw_resource_validate()
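vmw_resource_validate() (lines 1377-1419) uses the backup's buffer object as the validation target when one is attached, and, in the opposite case on lines 1417-1419, drops a backup that a resource type without backup requirements no longer needs. A short model of that cleanup rule, with hypothetical names:

#include <stdbool.h>
#include <stdlib.h>

struct model_buf { int refcount; };
struct model_res_func { bool needs_backup; };
struct model_res {
	const struct model_res_func *func;
	struct model_buf *backup;
};

static void model_buf_unreference(struct model_buf **buf)
{
	if (*buf && --(*buf)->refcount == 0)
		free(*buf);
	*buf = NULL;
}

static void model_res_validate_cleanup(struct model_res *res)
{
	/* Lines 1417-1419: a resource type that never needs a backup
	 * releases any buffer it still holds after validation. */
	if (!res->func->needs_backup && res->backup)
		model_buf_unreference(&res->backup);
}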