Lines matching refs:mob — cross-reference hits for the identifier mob in the Linux vmwgfx driver's MOB (memory object backing) page-table code. Each entry shows the source line number, the matching line, and the enclosing function.
90 struct vmw_mob *mob);
91 static void vmw_mob_pt_setup(struct vmw_mob *mob,
117 struct vmw_mob *mob; in vmw_setup_otable_base() local
128 mob = vmw_mob_create(otable->size >> PAGE_SHIFT); in vmw_setup_otable_base()
129 if (unlikely(mob == NULL)) { in vmw_setup_otable_base()
135 mob->pt_level = VMW_MOBFMT_PTDEPTH_0; in vmw_setup_otable_base()
136 mob->pt_root_page = vmw_piter_dma_addr(&iter); in vmw_setup_otable_base()
138 mob->pt_level = SVGA3D_MOBFMT_RANGE; in vmw_setup_otable_base()
139 mob->pt_root_page = vmw_piter_dma_addr(&iter); in vmw_setup_otable_base()
141 ret = vmw_mob_pt_populate(dev_priv, mob); in vmw_setup_otable_base()
145 vmw_mob_pt_setup(mob, iter, otable->size >> PAGE_SHIFT); in vmw_setup_otable_base()
146 mob->pt_level += VMW_MOBFMT_PTDEPTH_1 - SVGA3D_MOBFMT_PTDEPTH_1; in vmw_setup_otable_base()
160 cmd->body.baseAddress = mob->pt_root_page >> PAGE_SHIFT; in vmw_setup_otable_base()
163 cmd->body.ptDepth = mob->pt_level; in vmw_setup_otable_base()
170 BUG_ON(mob->pt_level == VMW_MOBFMT_PTDEPTH_2); in vmw_setup_otable_base()
173 otable->page_table = mob; in vmw_setup_otable_base()
179 vmw_mob_destroy(mob); in vmw_setup_otable_base()
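
The branch bodies at lines 135-146 above choose how the device will see the OTable backing: a single-page MOB needs no page table at all (VMW_MOBFMT_PTDEPTH_0), physically contiguous backing can be described as a plain range (SVGA3D_MOBFMT_RANGE), and anything else gets a page table built via vmw_mob_pt_populate()/vmw_mob_pt_setup(). The guarding conditions are not visible in the listing, so the sketch below treats them as assumptions (single data page, contiguous DMA), and its enum values are placeholders rather than the real SVGA3D ones.

/* Hypothetical, self-contained sketch of the format selection seen in
 * vmw_setup_otable_base() and vmw_mob_bind(); conditions and enum values
 * are placeholders, not the driver's. */
#include <stdbool.h>
#include <stdio.h>

enum mob_fmt {
	MOBFMT_PTDEPTH_0,   /* no page table: root PPN is the single data page */
	MOBFMT_RANGE,       /* physically contiguous backing: root PPN + length */
	MOBFMT_PTDEPTH_1,   /* one page-table level */
	MOBFMT_PTDEPTH_2,   /* two page-table levels */
};

static enum mob_fmt pick_format(unsigned long num_data_pages,
				bool dma_contiguous,
				unsigned int pt_levels_needed)
{
	if (num_data_pages == 1)
		return MOBFMT_PTDEPTH_0;
	if (dma_contiguous)
		return MOBFMT_RANGE;
	/* otherwise a page table is built; at most two levels are ever
	 * needed (see BUG_ON(mob->pt_level > 2) at line 557) */
	return pt_levels_needed == 1 ? MOBFMT_PTDEPTH_1 : MOBFMT_PTDEPTH_2;
}

int main(void)
{
	printf("%d\n", pick_format(1, false, 0));    /* MOBFMT_PTDEPTH_0 */
	printf("%d\n", pick_format(256, true, 0));   /* MOBFMT_RANGE     */
	printf("%d\n", pick_format(256, false, 1));  /* MOBFMT_PTDEPTH_1 */
	return 0;
}
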
409 struct vmw_mob *mob = kzalloc(sizeof(*mob), GFP_KERNEL); in vmw_mob_create() local
411 if (unlikely(mob == NULL)) in vmw_mob_create()
414 mob->num_pages = vmw_mob_calculate_pt_pages(data_pages); in vmw_mob_create()
416 return mob; in vmw_mob_create()
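
vmw_mob_create() only allocates the bookkeeping structure and records how many page-table pages the MOB will eventually need (line 414). Below is a minimal, self-contained sketch of that sizing arithmetic, assuming 4 KiB pages and 8-byte page-table entries; the real driver derives both from PAGE_SIZE and its PPN size and may round differently, so treat this as an illustration of the shape of the calculation, not its exact result.

/* Hypothetical sketch of the sizing behind vmw_mob_calculate_pt_pages():
 * count page-table pages level by level until one page can hold all the
 * entries of the level below. Page and entry sizes are assumptions. */
#include <stdio.h>

#define SKETCH_PAGE_SIZE 4096UL
#define SKETCH_PPN_SIZE  8UL                                   /* bytes per entry (assumed) */
#define ENTRIES_PER_PAGE (SKETCH_PAGE_SIZE / SKETCH_PPN_SIZE)  /* 512 */

static unsigned long pt_pages_for(unsigned long num_data_pages)
{
	unsigned long level_pages = num_data_pages;
	unsigned long total = 0;

	while (level_pages > 1) {
		/* pages needed at this level: one entry per page below */
		level_pages = (level_pages + ENTRIES_PER_PAGE - 1) / ENTRIES_PER_PAGE;
		total += level_pages;
	}
	return total;
}

int main(void)
{
	printf("%lu\n", pt_pages_for(1));     /* 0: no table needed      */
	printf("%lu\n", pt_pages_for(256));   /* 1: one leaf page        */
	printf("%lu\n", pt_pages_for(4096));  /* 9: 8 leaf pages + root  */
	return 0;
}
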
431 struct vmw_mob *mob) in vmw_mob_pt_populate() argument
434 BUG_ON(mob->pt_bo != NULL); in vmw_mob_pt_populate()
436 ret = ttm_bo_create(&dev_priv->bdev, mob->num_pages * PAGE_SIZE, in vmw_mob_pt_populate()
439 0, false, NULL, &mob->pt_bo); in vmw_mob_pt_populate()
443 ret = ttm_bo_reserve(mob->pt_bo, false, true, false, NULL); in vmw_mob_pt_populate()
446 ret = vmw_bo_driver.ttm_tt_populate(mob->pt_bo->ttm); in vmw_mob_pt_populate()
449 ret = vmw_bo_map_dma(mob->pt_bo); in vmw_mob_pt_populate()
453 ttm_bo_unreserve(mob->pt_bo); in vmw_mob_pt_populate()
458 ttm_bo_unreserve(mob->pt_bo); in vmw_mob_pt_populate()
459 ttm_bo_unref(&mob->pt_bo); in vmw_mob_pt_populate()
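
vmw_mob_pt_populate() acquires its resources in a fixed order (create the page-table buffer object, reserve it, populate its TTM pages, map them for DMA) and the failure path at lines 458-459 unwinds by dropping both the reservation and the buffer. The userspace sketch below shows that acquire/undo ladder with stand-in step functions; none of them are TTM APIs, and the exact failure handling of the driver (e.g. what happens if the reserve itself fails) is not visible in the listing.

/* Hypothetical sketch of the acquire/undo ladder in vmw_mob_pt_populate().
 * Every step function is a stand-in, not a TTM call. */
#include <stdio.h>

static int create_bo(void)     { return 0; }   /* stand-in for ttm_bo_create()   */
static int reserve_bo(void)    { return 0; }   /* stand-in for ttm_bo_reserve()  */
static int populate_tt(void)   { return 0; }   /* stand-in for ttm_tt_populate() */
static int map_dma(void)       { return -1; }  /* stand-in for vmw_bo_map_dma(); fails here */
static void unreserve_bo(void) { puts("unreserve"); }
static void unref_bo(void)     { puts("unref"); }

static int populate_pt(void)
{
	int ret = create_bo();
	if (ret)
		return ret;

	ret = reserve_bo();
	if (ret)
		goto out_unref;

	ret = populate_tt();
	if (ret)
		goto out_unreserve;

	ret = map_dma();
	if (ret)
		goto out_unreserve;

	unreserve_bo();   /* success: keep the BO, drop only the reservation */
	return 0;

out_unreserve:
	unreserve_bo();
out_unref:
	unref_bo();       /* failure: the page-table BO is dropped entirely */
	return ret;
}

int main(void) { return populate_pt() ? 1 : 0; }
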
537 static void vmw_mob_pt_setup(struct vmw_mob *mob, in vmw_mob_pt_setup() argument
542 struct ttm_buffer_object *bo = mob->pt_bo; in vmw_mob_pt_setup()
554 mob->pt_level = 0; in vmw_mob_pt_setup()
556 ++mob->pt_level; in vmw_mob_pt_setup()
557 BUG_ON(mob->pt_level > 2); in vmw_mob_pt_setup()
565 mob->pt_root_page = vmw_piter_dma_addr(&save_pt_iter); in vmw_mob_pt_setup()
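
vmw_mob_pt_setup() builds the table bottom-up: each pass collapses the previous level into page-table pages and bumps mob->pt_level, until a single page indexes everything below it and its address becomes pt_root_page (lines 554-565). A sketch of just the level counting follows, assuming 512 entries per page-table page; the assert mirrors the BUG_ON at line 557.

/* Hypothetical sketch of the level counting in vmw_mob_pt_setup().
 * The entries-per-page value is an assumption (4 KiB page / 8-byte entry). */
#include <assert.h>
#include <stdio.h>

#define ENTRIES_PER_PAGE 512UL

static unsigned int pt_levels_for(unsigned long num_data_pages)
{
	unsigned long pages = num_data_pages;
	unsigned int level = 0;

	while (pages > 1) {
		pages = (pages + ENTRIES_PER_PAGE - 1) / ENTRIES_PER_PAGE;
		++level;
	}
	assert(level <= 2);   /* mirrors BUG_ON(mob->pt_level > 2) */
	return level;
}

int main(void)
{
	printf("%u\n", pt_levels_for(1));       /* 0 levels: no table     */
	printf("%u\n", pt_levels_for(512));     /* 1 level: one leaf page */
	printf("%u\n", pt_levels_for(100000));  /* 2 levels               */
	return 0;
}
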
574 void vmw_mob_destroy(struct vmw_mob *mob) in vmw_mob_destroy() argument
576 if (mob->pt_bo) in vmw_mob_destroy()
577 ttm_bo_unref(&mob->pt_bo); in vmw_mob_destroy()
578 kfree(mob); in vmw_mob_destroy()
588 struct vmw_mob *mob) in vmw_mob_unbind() argument
595 struct ttm_buffer_object *bo = mob->pt_bo; in vmw_mob_unbind()
612 cmd->body.mobid = mob->id; in vmw_mob_unbind()
638 struct vmw_mob *mob, in vmw_mob_bind() argument
651 mob->id = mob_id; in vmw_mob_bind()
657 mob->pt_level = VMW_MOBFMT_PTDEPTH_0; in vmw_mob_bind()
658 mob->pt_root_page = vmw_piter_dma_addr(&data_iter); in vmw_mob_bind()
660 mob->pt_level = SVGA3D_MOBFMT_RANGE; in vmw_mob_bind()
661 mob->pt_root_page = vmw_piter_dma_addr(&data_iter); in vmw_mob_bind()
662 } else if (unlikely(mob->pt_bo == NULL)) { in vmw_mob_bind()
663 ret = vmw_mob_pt_populate(dev_priv, mob); in vmw_mob_bind()
667 vmw_mob_pt_setup(mob, data_iter, num_data_pages); in vmw_mob_bind()
669 mob->pt_level += VMW_MOBFMT_PTDEPTH_1 - SVGA3D_MOBFMT_PTDEPTH_1; in vmw_mob_bind()
684 cmd->body.ptDepth = mob->pt_level; in vmw_mob_bind()
685 cmd->body.base = mob->pt_root_page >> PAGE_SHIFT; in vmw_mob_bind()
695 ttm_bo_unref(&mob->pt_bo); in vmw_mob_bind()
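
vmw_mob_bind() finally translates the mob state into the define-MOB command: mobid, ptDepth and base (the root PPN, i.e. pt_root_page shifted down by PAGE_SHIFT) at lines 651 and 684-685. Below is a self-contained sketch of just that translation using a stand-in command body; any field or type name not visible in the listing above is an assumption, not the real SVGA3D command layout.

/* Hypothetical stand-in for the command body filled in vmw_mob_bind();
 * only the mobid, ptDepth and base fields come from the listing above. */
#include <stdint.h>
#include <stdio.h>

#define SKETCH_PAGE_SHIFT 12   /* assumed 4 KiB pages */

struct sketch_define_gb_mob {
	uint32_t mobid;
	uint32_t ptDepth;
	uint64_t base;            /* root PPN: pt_root_page >> PAGE_SHIFT */
};

struct sketch_mob {
	uint32_t id;
	uint32_t pt_level;
	uint64_t pt_root_page;    /* DMA address of the root page-table (or data) page */
};

static void fill_define_cmd(struct sketch_define_gb_mob *body,
			    const struct sketch_mob *mob)
{
	body->mobid   = mob->id;
	body->ptDepth = mob->pt_level;
	body->base    = mob->pt_root_page >> SKETCH_PAGE_SHIFT;
}

int main(void)
{
	struct sketch_mob mob = { .id = 7, .pt_level = 1, .pt_root_page = 0x1234000ULL };
	struct sketch_define_gb_mob body;

	fill_define_cmd(&body, &mob);
	printf("mobid=%u ptDepth=%u base=0x%llx\n",
	       body.mobid, body.ptDepth, (unsigned long long)body.base);
	return 0;
}
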