Lines matching refs:vgdev (identifier cross-reference; the numbered fragments below are from drivers/gpu/drm/virtio/virtgpu_vq.c in the Linux kernel)

41 void virtio_gpu_resource_id_get(struct virtio_gpu_device *vgdev,  in virtio_gpu_resource_id_get()  argument
47 spin_lock(&vgdev->resource_idr_lock); in virtio_gpu_resource_id_get()
48 handle = idr_alloc(&vgdev->resource_idr, NULL, 1, 0, GFP_NOWAIT); in virtio_gpu_resource_id_get()
49 spin_unlock(&vgdev->resource_idr_lock); in virtio_gpu_resource_id_get()
54 void virtio_gpu_resource_id_put(struct virtio_gpu_device *vgdev, uint32_t id) in virtio_gpu_resource_id_put() argument
56 spin_lock(&vgdev->resource_idr_lock); in virtio_gpu_resource_id_put()
57 idr_remove(&vgdev->resource_idr, id); in virtio_gpu_resource_id_put()
58 spin_unlock(&vgdev->resource_idr_lock); in virtio_gpu_resource_id_put()
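
Taken together, lines 41-58 are the per-device resource-handle allocator: an IDR guarded by a spinlock, with GFP_NOWAIT because the allocation happens under the lock. A minimal sketch of the pair, reconstructed from the fragments above (the handle-to-*resid plumbing elided by the listing is an assumption here, and struct virtio_gpu_device comes from the driver's private virtgpu_drv.h):

    void virtio_gpu_resource_id_get(struct virtio_gpu_device *vgdev,
                                    uint32_t *resid)
    {
            int handle;

            spin_lock(&vgdev->resource_idr_lock);
            /* GFP_NOWAIT: no sleeping allocation while holding the spinlock */
            handle = idr_alloc(&vgdev->resource_idr, NULL, 1, 0, GFP_NOWAIT);
            spin_unlock(&vgdev->resource_idr_lock);
            *resid = handle;
    }

    void virtio_gpu_resource_id_put(struct virtio_gpu_device *vgdev, uint32_t id)
    {
            spin_lock(&vgdev->resource_idr_lock);
            idr_remove(&vgdev->resource_idr, id);
            spin_unlock(&vgdev->resource_idr_lock);
    }
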
64 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_ctrl_ack() local
65 schedule_work(&vgdev->ctrlq.dequeue_work); in virtio_gpu_ctrl_ack()
71 struct virtio_gpu_device *vgdev = dev->dev_private; in virtio_gpu_cursor_ack() local
72 schedule_work(&vgdev->cursorq.dequeue_work); in virtio_gpu_cursor_ack()
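
Lines 64-72 are the two virtqueue interrupt callbacks; they run in atomic context, so each one only kicks its dequeue worker. A sketch, assuming the usual virtio convention that vq->vdev->priv carries the drm_device:

    static void virtio_gpu_ctrl_ack(struct virtqueue *vq)
    {
            struct drm_device *dev = vq->vdev->priv;
            struct virtio_gpu_device *vgdev = dev->dev_private;

            /* interrupt context: defer the real work to process context */
            schedule_work(&vgdev->ctrlq.dequeue_work);
    }

    static void virtio_gpu_cursor_ack(struct virtqueue *vq)
    {
            struct drm_device *dev = vq->vdev->priv;
            struct virtio_gpu_device *vgdev = dev->dev_private;

            schedule_work(&vgdev->cursorq.dequeue_work);
    }
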
75 int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_alloc_vbufs() argument
81 INIT_LIST_HEAD(&vgdev->free_vbufs); in virtio_gpu_alloc_vbufs()
82 spin_lock_init(&vgdev->free_vbufs_lock); in virtio_gpu_alloc_vbufs()
83 count += virtqueue_get_vring_size(vgdev->ctrlq.vq); in virtio_gpu_alloc_vbufs()
84 count += virtqueue_get_vring_size(vgdev->cursorq.vq); in virtio_gpu_alloc_vbufs()
89 vgdev->vbufs = kzalloc(size, GFP_KERNEL); in virtio_gpu_alloc_vbufs()
90 if (!vgdev->vbufs) in virtio_gpu_alloc_vbufs()
93 for (i = 0, ptr = vgdev->vbufs; in virtio_gpu_alloc_vbufs()
97 list_add(&vbuf->list, &vgdev->free_vbufs); in virtio_gpu_alloc_vbufs()
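
Lines 75-97 preallocate one command buffer (vbuf) per ring slot across both virtqueues, carved out of a single kzalloc'd block and strung onto free_vbufs. A sketch of the whole function; VBUFFER_SIZE (the per-entry footprint: struct plus inline command/response space) is taken on faith from the driver's internal macros:

    int virtio_gpu_alloc_vbufs(struct virtio_gpu_device *vgdev)
    {
            struct virtio_gpu_vbuffer *vbuf;
            int i, size, count = 0;
            void *ptr;

            INIT_LIST_HEAD(&vgdev->free_vbufs);
            spin_lock_init(&vgdev->free_vbufs_lock);
            /* one pool entry per descriptor in either ring */
            count += virtqueue_get_vring_size(vgdev->ctrlq.vq);
            count += virtqueue_get_vring_size(vgdev->cursorq.vq);
            size = count * VBUFFER_SIZE;
            vgdev->vbufs = kzalloc(size, GFP_KERNEL);
            if (!vgdev->vbufs)
                    return -ENOMEM;

            for (i = 0, ptr = vgdev->vbufs;
                 i < count;
                 i++, ptr += VBUFFER_SIZE) {
                    vbuf = ptr;
                    list_add(&vbuf->list, &vgdev->free_vbufs);
            }
            return 0;
    }
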
102 void virtio_gpu_free_vbufs(struct virtio_gpu_device *vgdev) in virtio_gpu_free_vbufs() argument
107 count += virtqueue_get_vring_size(vgdev->ctrlq.vq); in virtio_gpu_free_vbufs()
108 count += virtqueue_get_vring_size(vgdev->cursorq.vq); in virtio_gpu_free_vbufs()
110 spin_lock(&vgdev->free_vbufs_lock); in virtio_gpu_free_vbufs()
112 if (WARN_ON(list_empty(&vgdev->free_vbufs))) in virtio_gpu_free_vbufs()
114 vbuf = list_first_entry(&vgdev->free_vbufs, in virtio_gpu_free_vbufs()
118 spin_unlock(&vgdev->free_vbufs_lock); in virtio_gpu_free_vbufs()
119 kfree(vgdev->vbufs); in virtio_gpu_free_vbufs()
123 virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev, in virtio_gpu_get_vbuf() argument
129 spin_lock(&vgdev->free_vbufs_lock); in virtio_gpu_get_vbuf()
130 BUG_ON(list_empty(&vgdev->free_vbufs)); in virtio_gpu_get_vbuf()
131 vbuf = list_first_entry(&vgdev->free_vbufs, in virtio_gpu_get_vbuf()
134 spin_unlock(&vgdev->free_vbufs_lock); in virtio_gpu_get_vbuf()
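
Lines 123-134 pop a vbuf off the free list under the pool lock. The BUG_ON at line 130 holds by construction: the pool has one entry per ring descriptor, and an entry is only taken when a descriptor is about to be consumed. A sketch; the field setup after the unlock is not in the listing and is reconstructed here as an assumption consistent with the VBUFFER_SIZE layout:

    static struct virtio_gpu_vbuffer*
    virtio_gpu_get_vbuf(struct virtio_gpu_device *vgdev,
                        int size, int resp_size, void *resp_buf,
                        virtio_gpu_resp_cb resp_cb)
    {
            struct virtio_gpu_vbuffer *vbuf;

            spin_lock(&vgdev->free_vbufs_lock);
            BUG_ON(list_empty(&vgdev->free_vbufs));
            vbuf = list_first_entry(&vgdev->free_vbufs,
                                    struct virtio_gpu_vbuffer, list);
            list_del(&vbuf->list);
            spin_unlock(&vgdev->free_vbufs_lock);
            memset(vbuf, 0, VBUFFER_SIZE);

            /* the inline command area sits right after the struct */
            vbuf->buf = (void *)vbuf + sizeof(*vbuf);
            vbuf->size = size;
            vbuf->resp_cb = resp_cb;
            vbuf->resp_size = resp_size;
            /* small responses are kept inline, after the command */
            if (resp_size <= MAX_INLINE_RESP_SIZE)
                    vbuf->resp_buf = (void *)vbuf->buf + size;
            else
                    vbuf->resp_buf = resp_buf;
            return vbuf;
    }
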
151 static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd() argument
157 vbuf = virtio_gpu_get_vbuf(vgdev, size, in virtio_gpu_alloc_cmd()
169 virtio_gpu_alloc_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cursor() argument
175 (vgdev, sizeof(struct virtio_gpu_update_cursor), in virtio_gpu_alloc_cursor()
185 static void *virtio_gpu_alloc_cmd_resp(struct virtio_gpu_device *vgdev, in virtio_gpu_alloc_cmd_resp() argument
193 vbuf = virtio_gpu_get_vbuf(vgdev, cmd_size, in virtio_gpu_alloc_cmd_resp()
203 static void free_vbuf(struct virtio_gpu_device *vgdev, in free_vbuf() argument
209 spin_lock(&vgdev->free_vbufs_lock); in free_vbuf()
210 list_add(&vbuf->list, &vgdev->free_vbufs); in free_vbuf()
211 spin_unlock(&vgdev->free_vbufs_lock); in free_vbuf()
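
Lines 151-211 are the thin allocation wrappers and the return path: virtio_gpu_alloc_cmd() reserves inline response space for a plain ctrl header, virtio_gpu_alloc_cmd_resp() takes an explicit callback and response buffer, and free_vbuf() puts the entry back on the pool. A sketch of the two ends (the kfree of an out-of-line response buffer follows from the resp_size plumbing and is an assumption):

    static void *virtio_gpu_alloc_cmd(struct virtio_gpu_device *vgdev,
                                      struct virtio_gpu_vbuffer **vbuffer_p,
                                      int size)
    {
            struct virtio_gpu_vbuffer *vbuf;

            vbuf = virtio_gpu_get_vbuf(vgdev, size,
                                       sizeof(struct virtio_gpu_ctrl_hdr),
                                       NULL, NULL);
            *vbuffer_p = vbuf;
            return vbuf->buf;
    }

    static void free_vbuf(struct virtio_gpu_device *vgdev,
                          struct virtio_gpu_vbuffer *vbuf)
    {
            if (vbuf->resp_size > MAX_INLINE_RESP_SIZE)
                    kfree(vbuf->resp_buf);
            kfree(vbuf->data_buf);
            spin_lock(&vgdev->free_vbufs_lock);
            list_add(&vbuf->list, &vgdev->free_vbufs);
            spin_unlock(&vgdev->free_vbufs_lock);
    }
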
230 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_ctrl_func() local
239 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
241 virtqueue_disable_cb(vgdev->ctrlq.vq); in virtio_gpu_dequeue_ctrl_func()
242 reclaim_vbufs(vgdev->ctrlq.vq, &reclaim_list); in virtio_gpu_dequeue_ctrl_func()
244 } while (!virtqueue_enable_cb(vgdev->ctrlq.vq)); in virtio_gpu_dequeue_ctrl_func()
245 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_dequeue_ctrl_func()
262 entry->resp_cb(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
265 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_ctrl_func()
267 wake_up(&vgdev->ctrlq.ack_queue); in virtio_gpu_dequeue_ctrl_func()
270 virtio_gpu_fence_event_process(vgdev, fence_id); in virtio_gpu_dequeue_ctrl_func()
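
Lines 230-270 drain the control ring in process context. The loop at lines 241-244 is the standard virtio race closer: callbacks stay disabled while draining, and virtqueue_enable_cb() returns false if buffers arrived in the window, which re-runs the drain instead of losing an interrupt. Completions are then handled outside qlock: per-buffer resp_cb, return to the pool, a wake_up for ring-full waiters, and fence processing. reclaim_vbufs() takes no vgdev and so is absent from this listing; a sketch of what it plausibly does:

    static void reclaim_vbufs(struct virtqueue *vq, struct list_head *reclaim_list)
    {
            struct virtio_gpu_vbuffer *vbuf;
            unsigned int len;

            /* move every completed buffer off the used ring onto a
             * private list so it can be processed after qlock drops */
            while ((vbuf = virtqueue_get_buf(vq, &len)) != NULL)
                    list_add_tail(&vbuf->list, reclaim_list);
    }
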
275 struct virtio_gpu_device *vgdev = in virtio_gpu_dequeue_cursor_func() local
282 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
284 virtqueue_disable_cb(vgdev->cursorq.vq); in virtio_gpu_dequeue_cursor_func()
285 reclaim_vbufs(vgdev->cursorq.vq, &reclaim_list); in virtio_gpu_dequeue_cursor_func()
286 } while (!virtqueue_enable_cb(vgdev->cursorq.vq)); in virtio_gpu_dequeue_cursor_func()
287 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_dequeue_cursor_func()
291 free_vbuf(vgdev, entry); in virtio_gpu_dequeue_cursor_func()
293 wake_up(&vgdev->cursorq.ack_queue); in virtio_gpu_dequeue_cursor_func()
296 static int virtio_gpu_queue_ctrl_buffer_locked(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer_locked() argument
299 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_ctrl_buffer_locked()
304 if (!vgdev->vqs_ready) in virtio_gpu_queue_ctrl_buffer_locked()
326 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer_locked()
327 wait_event(vgdev->ctrlq.ack_queue, vq->num_free); in virtio_gpu_queue_ctrl_buffer_locked()
328 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer_locked()
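
Lines 296-328 do the enqueue proper: refuse until vqs_ready, build up to three scatterlist entries, and on -ENOSPC drop qlock and sleep on ack_queue until the dequeue worker (line 267) signals free slots, then retry. A condensed sketch; the optional data-buffer descriptor between command and response is abbreviated out:

    static int virtio_gpu_queue_ctrl_buffer_locked(struct virtio_gpu_device *vgdev,
                                                   struct virtio_gpu_vbuffer *vbuf)
    {
            struct virtqueue *vq = vgdev->ctrlq.vq;
            struct scatterlist *sgs[3], vcmd, vresp;
            int outcnt = 0, incnt = 0, ret;

            if (!vgdev->vqs_ready)
                    return -ENODEV;

            /* out: the command itself; in: the (optional) response */
            sg_init_one(&vcmd, vbuf->buf, vbuf->size);
            sgs[outcnt++] = &vcmd;
            if (vbuf->resp_size) {
                    sg_init_one(&vresp, vbuf->resp_buf, vbuf->resp_size);
                    sgs[outcnt + incnt] = &vresp;
                    incnt++;
            }

    retry:
            ret = virtqueue_add_sgs(vq, sgs, outcnt, incnt, vbuf, GFP_ATOMIC);
            if (ret == -ENOSPC) {
                    /* ring full: wait for the dequeue worker's wake_up */
                    spin_unlock(&vgdev->ctrlq.qlock);
                    wait_event(vgdev->ctrlq.ack_queue, vq->num_free);
                    spin_lock(&vgdev->ctrlq.qlock);
                    goto retry;
            }
            virtqueue_kick(vq);
            return ret;
    }
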
339 static int virtio_gpu_queue_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_ctrl_buffer() argument
344 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer()
345 rc = virtio_gpu_queue_ctrl_buffer_locked(vgdev, vbuf); in virtio_gpu_queue_ctrl_buffer()
346 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_ctrl_buffer()
350 static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_fenced_ctrl_buffer() argument
355 struct virtqueue *vq = vgdev->ctrlq.vq; in virtio_gpu_queue_fenced_ctrl_buffer()
359 spin_lock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_fenced_ctrl_buffer()
370 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_fenced_ctrl_buffer()
371 wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= 3); in virtio_gpu_queue_fenced_ctrl_buffer()
376 virtio_gpu_fence_emit(vgdev, hdr, fence); in virtio_gpu_queue_fenced_ctrl_buffer()
377 rc = virtio_gpu_queue_ctrl_buffer_locked(vgdev, vbuf); in virtio_gpu_queue_fenced_ctrl_buffer()
378 spin_unlock(&vgdev->ctrlq.qlock); in virtio_gpu_queue_fenced_ctrl_buffer()
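
Lines 350-378 are the fenced variant. A fenced command may need three descriptors (command, attached data, response), hence the stricter vq->num_free >= 3 reservation at line 371; the fence is emitted while qlock is held so fence IDs land on the ring in strictly increasing order. A sketch of the locking dance:

    static int virtio_gpu_queue_fenced_ctrl_buffer(struct virtio_gpu_device *vgdev,
                                                   struct virtio_gpu_vbuffer *vbuf,
                                                   struct virtio_gpu_ctrl_hdr *hdr,
                                                   struct virtio_gpu_fence **fence)
    {
            struct virtqueue *vq = vgdev->ctrlq.vq;
            int rc;

    again:
            spin_lock(&vgdev->ctrlq.qlock);
            /* worst case: command + data + response descriptors */
            if (vq->num_free < 3) {
                    spin_unlock(&vgdev->ctrlq.qlock);
                    wait_event(vgdev->ctrlq.ack_queue, vq->num_free >= 3);
                    goto again;
            }
            if (fence)
                    /* under qlock: keeps fence IDs monotonic on the ring */
                    virtio_gpu_fence_emit(vgdev, hdr, fence);
            rc = virtio_gpu_queue_ctrl_buffer_locked(vgdev, vbuf);
            spin_unlock(&vgdev->ctrlq.qlock);
            return rc;
    }
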
382 static int virtio_gpu_queue_cursor(struct virtio_gpu_device *vgdev, in virtio_gpu_queue_cursor() argument
385 struct virtqueue *vq = vgdev->cursorq.vq; in virtio_gpu_queue_cursor()
390 if (!vgdev->vqs_ready) in virtio_gpu_queue_cursor()
397 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
401 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
402 wait_event(vgdev->cursorq.ack_queue, vq->num_free); in virtio_gpu_queue_cursor()
403 spin_lock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
409 spin_unlock(&vgdev->cursorq.qlock); in virtio_gpu_queue_cursor()
420 void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_create_resource() argument
429 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_create_resource()
438 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_create_resource()
441 void virtio_gpu_cmd_unref_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_unref_resource() argument
447 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_unref_resource()
453 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_unref_resource()
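
Lines 420-453 show the template every fire-and-forget command follows: allocate a vbuf, zero the inline command, fill little-endian fields, queue. Reconstructed here for the 2D create; the struct and constant names come from the virtio-gpu spec header (include/uapi/linux/virtio_gpu.h), while the memset and field fills are assumptions consistent with that layout:

    void virtio_gpu_cmd_create_resource(struct virtio_gpu_device *vgdev,
                                        uint32_t resource_id,
                                        uint32_t format,
                                        uint32_t width,
                                        uint32_t height)
    {
            struct virtio_gpu_resource_create_2d *cmd_p;
            struct virtio_gpu_vbuffer *vbuf;

            cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
            memset(cmd_p, 0, sizeof(*cmd_p));

            /* all wire fields are little-endian per the virtio spec */
            cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_RESOURCE_CREATE_2D);
            cmd_p->resource_id = cpu_to_le32(resource_id);
            cmd_p->format = cpu_to_le32(format);
            cmd_p->width = cpu_to_le32(width);
            cmd_p->height = cpu_to_le32(height);

            virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);
    }
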
456 void virtio_gpu_cmd_resource_inval_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_inval_backing() argument
462 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_inval_backing()
468 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_inval_backing()
471 void virtio_gpu_cmd_set_scanout(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_set_scanout() argument
479 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_set_scanout()
490 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_set_scanout()
493 void virtio_gpu_cmd_resource_flush(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_flush() argument
501 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_flush()
511 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_resource_flush()
514 void virtio_gpu_cmd_transfer_to_host_2d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_2d() argument
523 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_2d()
534 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_to_host_2d()
538 virtio_gpu_cmd_resource_attach_backing(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_attach_backing() argument
547 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_attach_backing()
557 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_attach_backing()
560 static void virtio_gpu_cmd_get_display_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_display_info_cb() argument
567 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
568 for (i = 0; i < vgdev->num_scanouts; i++) { in virtio_gpu_cmd_get_display_info_cb()
569 vgdev->outputs[i].info = resp->pmodes[i]; in virtio_gpu_cmd_get_display_info_cb()
581 vgdev->display_info_pending = false; in virtio_gpu_cmd_get_display_info_cb()
582 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_display_info_cb()
583 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_display_info_cb()
585 if (!drm_helper_hpd_irq_event(vgdev->ddev)) in virtio_gpu_cmd_get_display_info_cb()
586 drm_kms_helper_hotplug_event(vgdev->ddev); in virtio_gpu_cmd_get_display_info_cb()
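
Lines 560-586 are the response side of the display-info query: copy each scanout's mode under display_info_lock, clear display_info_pending, wake resp_wq, then raise a hotplug event so KMS re-probes. The matching wait lives in the driver's load path rather than in this file; roughly, as a sketch (the 5-second timeout is an assumption for illustration):

    /* caller side of the handshake with the callback above */
    virtio_gpu_cmd_get_display_info(vgdev);    /* sets display_info_pending */
    wait_event_timeout(vgdev->resp_wq,
                       !vgdev->display_info_pending,
                       5 * HZ);
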
589 static void virtio_gpu_cmd_get_capset_info_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset_info_cb() argument
598 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
599 vgdev->capsets[i].id = le32_to_cpu(resp->capset_id); in virtio_gpu_cmd_get_capset_info_cb()
600 vgdev->capsets[i].max_version = le32_to_cpu(resp->capset_max_version); in virtio_gpu_cmd_get_capset_info_cb()
601 vgdev->capsets[i].max_size = le32_to_cpu(resp->capset_max_size); in virtio_gpu_cmd_get_capset_info_cb()
602 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset_info_cb()
603 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_get_capset_info_cb()
606 static void virtio_gpu_cmd_capset_cb(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_capset_cb() argument
615 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
616 list_for_each_entry(cache_ent, &vgdev->cap_cache, head) { in virtio_gpu_cmd_capset_cb()
625 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_capset_cb()
626 wake_up(&vgdev->resp_wq); in virtio_gpu_cmd_capset_cb()
630 int virtio_gpu_cmd_get_display_info(struct virtio_gpu_device *vgdev) in virtio_gpu_cmd_get_display_info() argument
642 (vgdev, &virtio_gpu_cmd_get_display_info_cb, &vbuf, in virtio_gpu_cmd_get_display_info()
647 vgdev->display_info_pending = true; in virtio_gpu_cmd_get_display_info()
649 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_display_info()
653 int virtio_gpu_cmd_get_capset_info(struct virtio_gpu_device *vgdev, int idx) in virtio_gpu_cmd_get_capset_info() argument
665 (vgdev, &virtio_gpu_cmd_get_capset_info_cb, &vbuf, in virtio_gpu_cmd_get_capset_info()
672 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset_info()
676 int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_get_capset() argument
682 int max_size = vgdev->capsets[idx].max_size; in virtio_gpu_cmd_get_capset()
686 if (idx > vgdev->num_capsets) in virtio_gpu_cmd_get_capset()
689 if (version > vgdev->capsets[idx].max_version) in virtio_gpu_cmd_get_capset()
711 cache_ent->id = vgdev->capsets[idx].id; in virtio_gpu_cmd_get_capset()
714 spin_lock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
715 list_add_tail(&cache_ent->head, &vgdev->cap_cache); in virtio_gpu_cmd_get_capset()
716 spin_unlock(&vgdev->display_info_lock); in virtio_gpu_cmd_get_capset()
719 (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p), in virtio_gpu_cmd_get_capset()
723 cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id); in virtio_gpu_cmd_get_capset()
726 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_get_capset()
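
Lines 676-726 fetch a capability set: validate index and version against the table filled in by the info callback, allocate a cache entry plus a response buffer sized from capsets[idx].max_size, publish the entry on cap_cache so virtio_gpu_cmd_capset_cb (lines 606-626) can find and fill it, then queue the command. A condensed sketch; error unwinding is abbreviated and the cap-cache field names are assumptions from the driver's private header:

    int virtio_gpu_cmd_get_capset(struct virtio_gpu_device *vgdev,
                                  int idx, int version,
                                  struct virtio_gpu_drv_cap_cache **cache_p)
    {
            struct virtio_gpu_get_capset *cmd_p;
            struct virtio_gpu_vbuffer *vbuf;
            int max_size = vgdev->capsets[idx].max_size;
            struct virtio_gpu_drv_cap_cache *cache_ent;
            void *resp_buf;

            /* mirrors line 686; later kernels tightened this to >= */
            if (idx > vgdev->num_capsets)
                    return -EINVAL;
            if (version > vgdev->capsets[idx].max_version)
                    return -EINVAL;

            cache_ent = kzalloc(sizeof(*cache_ent), GFP_KERNEL);
            resp_buf = kzalloc(sizeof(struct virtio_gpu_resp_capset) + max_size,
                               GFP_KERNEL);
            if (!cache_ent || !resp_buf)
                    return -ENOMEM;        /* abbreviated: real code frees partials */

            cache_ent->caps_cache = kmalloc(max_size, GFP_KERNEL);
            if (!cache_ent->caps_cache)
                    return -ENOMEM;        /* abbreviated likewise */
            atomic_set(&cache_ent->is_valid, 0);
            cache_ent->size = max_size;
            cache_ent->id = vgdev->capsets[idx].id;
            cache_ent->version = version;

            /* publish the (still empty) entry so the callback can find it */
            spin_lock(&vgdev->display_info_lock);
            list_add_tail(&cache_ent->head, &vgdev->cap_cache);
            spin_unlock(&vgdev->display_info_lock);

            cmd_p = virtio_gpu_alloc_cmd_resp
                    (vgdev, &virtio_gpu_cmd_capset_cb, &vbuf, sizeof(*cmd_p),
                     sizeof(struct virtio_gpu_resp_capset) + max_size, resp_buf);
            cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_GET_CAPSET);
            cmd_p->capset_id = cpu_to_le32(vgdev->capsets[idx].id);
            cmd_p->capset_version = cpu_to_le32(version);
            *cache_p = cache_ent;
            virtio_gpu_queue_ctrl_buffer(vgdev, vbuf);

            return 0;
    }
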
731 void virtio_gpu_cmd_context_create(struct virtio_gpu_device *vgdev, uint32_t id, in virtio_gpu_cmd_context_create() argument
737 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_create()
745 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_create()
748 void virtio_gpu_cmd_context_destroy(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_destroy() argument
754 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_destroy()
759 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_destroy()
762 void virtio_gpu_cmd_context_attach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_attach_resource() argument
769 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_attach_resource()
775 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_attach_resource()
779 void virtio_gpu_cmd_context_detach_resource(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_context_detach_resource() argument
786 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_context_detach_resource()
792 virtio_gpu_queue_ctrl_buffer(vgdev, vbuf); in virtio_gpu_cmd_context_detach_resource()
796 virtio_gpu_cmd_resource_create_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_resource_create_3d() argument
803 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_resource_create_3d()
810 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_resource_create_3d()
813 void virtio_gpu_cmd_transfer_to_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_to_host_3d() argument
822 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_to_host_3d()
832 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_to_host_3d()
835 void virtio_gpu_cmd_transfer_from_host_3d(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_transfer_from_host_3d() argument
844 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_transfer_from_host_3d()
854 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_transfer_from_host_3d()
857 void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev, in virtio_gpu_cmd_submit() argument
864 cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p)); in virtio_gpu_cmd_submit()
874 virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence); in virtio_gpu_cmd_submit()
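
Lines 857-874 queue an execbuffer. Unlike the fixed-size commands, the caller's command stream rides along as vbuf->data_buf, becoming an extra out-descriptor, and free_vbuf() kfrees it after completion; the fence travels in the command header. A sketch:

    void virtio_gpu_cmd_submit(struct virtio_gpu_device *vgdev,
                               void *data, uint32_t data_size,
                               uint32_t ctx_id, struct virtio_gpu_fence **fence)
    {
            struct virtio_gpu_cmd_submit *cmd_p;
            struct virtio_gpu_vbuffer *vbuf;

            cmd_p = virtio_gpu_alloc_cmd(vgdev, &vbuf, sizeof(*cmd_p));
            memset(cmd_p, 0, sizeof(*cmd_p));

            /* the execbuffer payload becomes a second out-descriptor */
            vbuf->data_buf = data;
            vbuf->data_size = data_size;

            cmd_p->hdr.type = cpu_to_le32(VIRTIO_GPU_CMD_SUBMIT_3D);
            cmd_p->hdr.ctx_id = cpu_to_le32(ctx_id);
            cmd_p->size = cpu_to_le32(data_size);

            virtio_gpu_queue_fenced_ctrl_buffer(vgdev, vbuf, &cmd_p->hdr, fence);
    }
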
877 int virtio_gpu_object_attach(struct virtio_gpu_device *vgdev, in virtio_gpu_object_attach() argument
888 ret = virtio_gpu_object_get_sg_table(vgdev, obj); in virtio_gpu_object_attach()
908 virtio_gpu_cmd_resource_attach_backing(vgdev, resource_id, in virtio_gpu_object_attach()
915 void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev, in virtio_gpu_cursor_ping() argument
922 cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf); in virtio_gpu_cursor_ping()
924 virtio_gpu_queue_cursor(vgdev, vbuf); in virtio_gpu_cursor_ping()
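
Lines 915-924 push cursor state over the dedicated cursor queue, which is fire-and-forget and carries no responses. A sketch, assuming output->cursor is a prefilled struct virtio_gpu_update_cursor and output->index names the scanout:

    void virtio_gpu_cursor_ping(struct virtio_gpu_device *vgdev,
                                struct virtio_gpu_output *output)
    {
            struct virtio_gpu_vbuffer *vbuf;
            struct virtio_gpu_update_cursor *cur_p;

            output->cursor.pos.scanout_id = cpu_to_le32(output->index);
            cur_p = virtio_gpu_alloc_cursor(vgdev, &vbuf);
            memcpy(cur_p, &output->cursor, sizeof(output->cursor));
            virtio_gpu_queue_cursor(vgdev, vbuf);
    }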