otables           424 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 	struct vmw_otable *otables;
otables           240 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct vmw_otable *otables = batch->otables;
otables           250 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (!otables[i].enabled)
otables           253 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		otables[i].size =
otables           254 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 			(otables[i].size + PAGE_SIZE - 1) & PAGE_MASK;
otables           255 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		bo_size += otables[i].size;
otables           279 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (!batch->otables[i].enabled)
otables           284 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 					    &otables[i]);
otables           287 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		offset += otables[i].size;
otables           296 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (batch->otables[i].enabled)
otables           298 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 						 &batch->otables[i]);
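The vmwgfx_mob.c entries around lines 240-298 come from the otable batch setup and its error path: each enabled table's size is rounded up to a page boundary, the rounded sizes are summed into one backing-buffer size, and on failure the already-set-up tables are torn down again. Below is a minimal, hedged sketch of the sizing idiom only; struct vmw_otable is reduced to the two fields used here, and the rest of the driver context is assumed away.

#include <linux/mm.h>		/* PAGE_SIZE, PAGE_MASK */
#include <linux/types.h>	/* bool */

/* Simplified stand-in for the driver's struct vmw_otable. */
struct vmw_otable {
	bool enabled;
	unsigned long size;
};

static unsigned long otable_batch_bo_size(struct vmw_otable *otables,
					   unsigned long num_otables)
{
	unsigned long i, bo_size = 0;

	for (i = 0; i < num_otables; ++i) {
		if (!otables[i].enabled)
			continue;

		/* Round each table up to a whole page (cf. vmwgfx_mob.c:254),
		 * then accumulate the total size of the backing buffer. */
		otables[i].size = (otables[i].size + PAGE_SIZE - 1) & PAGE_MASK;
		bo_size += otables[i].size;
	}

	return bo_size;
}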
otables           320 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct vmw_otable **otables = &dev_priv->otable_batch.otables;
otables           324 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		*otables = kmemdup(dx_tables, sizeof(dx_tables), GFP_KERNEL);
otables           325 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (!(*otables))
otables           330 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		*otables = kmemdup(pre_dx_tables, sizeof(pre_dx_tables),
otables           332 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (!(*otables))
otables           345 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	kfree(*otables);
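The vmwgfx_mob.c:320-345 entries are from the otables setup path: a static template array (dx_tables or pre_dx_tables, depending on device capabilities) is duplicated with kmemdup() into dev_priv->otable_batch.otables, and kfree()'d again if the remaining setup fails. A hedged sketch of that select-and-duplicate pattern follows; the template contents and the has_dx flag are illustrative stand-ins, not the driver's real tables or capability check.

#include <linux/errno.h>
#include <linux/kernel.h>	/* ARRAY_SIZE */
#include <linux/slab.h>		/* kmemdup(), kfree() */
#include <linux/types.h>

struct vmw_otable {
	bool enabled;
	unsigned long size;
};

/* Illustrative templates; the real arrays are keyed to SVGA object-table
 * types and sizes, not these placeholder values. */
static const struct vmw_otable pre_dx_tables[] = {
	{ .enabled = true, .size = 4096 },
};
static const struct vmw_otable dx_tables[] = {
	{ .enabled = true, .size = 4096 },
	{ .enabled = true, .size = 4096 },
};

static int otables_setup_sketch(bool has_dx, struct vmw_otable **otables,
				unsigned long *num_otables)
{
	/* Duplicate the chosen template so it can be modified per device. */
	if (has_dx) {
		*otables = kmemdup(dx_tables, sizeof(dx_tables), GFP_KERNEL);
		*num_otables = ARRAY_SIZE(dx_tables);
	} else {
		*otables = kmemdup(pre_dx_tables, sizeof(pre_dx_tables),
				   GFP_KERNEL);
		*num_otables = ARRAY_SIZE(pre_dx_tables);
	}
	if (!*otables)
		return -ENOMEM;

	return 0;
}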
otables           357 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (batch->otables[i].enabled)
otables           359 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 						 &batch->otables[i]);
otables           381 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	kfree(dev_priv->otable_batch.otables);
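The final entries (vmwgfx_mob.c:357-381) are the teardown side: the batch is walked, each still-enabled table is torn down, and the kmemdup()'d array is kfree()'d. A minimal sketch of that release order is below; the batch struct is simplified and takedown_one() is a hypothetical callback standing in for the driver's per-table teardown call.

#include <linux/slab.h>		/* kfree() */
#include <linux/types.h>

struct vmw_otable {
	bool enabled;
	unsigned long size;
};

/* Simplified stand-in for the driver's otable batch. */
struct vmw_otable_batch {
	unsigned long num_otables;
	struct vmw_otable *otables;
};

static void otable_batch_takedown_sketch(struct vmw_otable_batch *batch,
					 void (*takedown_one)(struct vmw_otable *))
{
	unsigned long i;

	/* Tear down every table that was successfully set up. */
	for (i = 0; i < batch->num_otables; ++i)
		if (batch->otables[i].enabled)
			takedown_one(&batch->otables[i]);

	/* Release the array allocated with kmemdup() during setup. */
	kfree(batch->otables);
	batch->otables = NULL;
}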