Lines matching refs:q — every reference to the struct vb2_queue pointer q in the videobuf2 core (videobuf2-core.c). The left-hand numbers are source line numbers; the trailing annotation names the containing function.
37 static void __vb2_queue_cancel(struct vb2_queue *q);
45 struct vb2_queue *q = vb->vb2_queue; in __vb2_buf_mem_alloc() local
47 q->is_output ? DMA_TO_DEVICE : DMA_FROM_DEVICE; in __vb2_buf_mem_alloc()
56 unsigned long size = PAGE_ALIGN(q->plane_sizes[plane]); in __vb2_buf_mem_alloc()
58 mem_priv = call_ptr_memop(vb, alloc, q->alloc_ctx[plane], in __vb2_buf_mem_alloc()
59 size, dma_dir, q->gfp_flags); in __vb2_buf_mem_alloc()
65 vb->planes[plane].length = q->plane_sizes[plane]; in __vb2_buf_mem_alloc()
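
Note: __vb2_buf_mem_alloc() rounds each driver-negotiated plane size up to a full page and delegates the allocation to the queue's memory backend. As called above via call_ptr_memop(), the relevant vb2_mem_ops hooks in this kernel generation have roughly the following shape (abridged excerpt from memory, not a complete declaration):

    struct vb2_mem_ops {
            /* alloc() receives the per-plane context from q->alloc_ctx[],
             * the PAGE_ALIGN()ed size, a direction derived from
             * q->is_output, and q->gfp_flags. */
            void *(*alloc)(void *alloc_ctx, unsigned long size,
                           enum dma_data_direction dma_dir, gfp_t gfp_flags);
            void (*put)(void *buf_priv);
            /* ... get_userptr, attach_dmabuf, mmap, ... (see the
             * __verify_*_ops() checks further down) */
    };
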
143 static void __setup_lengths(struct vb2_queue *q, unsigned int n) in __setup_lengths() argument
148 for (buffer = q->num_buffers; buffer < q->num_buffers + n; ++buffer) { in __setup_lengths()
149 vb = q->bufs[buffer]; in __setup_lengths()
154 vb->planes[plane].length = q->plane_sizes[plane]; in __setup_lengths()
162 static void __setup_offsets(struct vb2_queue *q, unsigned int n) in __setup_offsets() argument
168 if (q->num_buffers) { in __setup_offsets()
170 vb = q->bufs[q->num_buffers - 1]; in __setup_offsets()
177 for (buffer = q->num_buffers; buffer < q->num_buffers + n; ++buffer) { in __setup_offsets()
178 vb = q->bufs[buffer]; in __setup_offsets()
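
Note: __setup_offsets() assigns each plane of each new buffer a unique, page-aligned mem_offset cookie, continuing after the planes of any buffers that already exist; userspace later passes the cookie back as the mmap() offset, and __find_plane_by_offset() (further down) reverses the mapping. A minimal sketch of the layout rule, with illustrative variable names:

    unsigned long off = 0;  /* vb2 actually resumes after existing buffers */
    unsigned int b, p;

    for (b = 0; b < num_new_buffers; b++)
            for (p = 0; p < num_planes; p++) {
                    bufs[b]->planes[p].m.offset = off;
                    off += PAGE_ALIGN(bufs[b]->planes[p].length);
            }
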
201 static int __vb2_queue_alloc(struct vb2_queue *q, enum vb2_memory memory, in __vb2_queue_alloc() argument
210 vb = kzalloc(q->buf_struct_size, GFP_KERNEL); in __vb2_queue_alloc()
217 vb->vb2_queue = q; in __vb2_queue_alloc()
219 vb->index = q->num_buffers + buffer; in __vb2_queue_alloc()
220 vb->type = q->type; in __vb2_queue_alloc()
247 q->bufs[q->num_buffers + buffer] = vb; in __vb2_queue_alloc()
250 __setup_lengths(q, buffer); in __vb2_queue_alloc()
252 __setup_offsets(q, buffer); in __vb2_queue_alloc()
263 static void __vb2_free_mem(struct vb2_queue *q, unsigned int buffers) in __vb2_free_mem() argument
268 for (buffer = q->num_buffers - buffers; buffer < q->num_buffers; in __vb2_free_mem()
270 vb = q->bufs[buffer]; in __vb2_free_mem()
275 if (q->memory == VB2_MEMORY_MMAP) in __vb2_free_mem()
277 else if (q->memory == VB2_MEMORY_DMABUF) in __vb2_free_mem()
289 static int __vb2_queue_free(struct vb2_queue *q, unsigned int buffers) in __vb2_queue_free() argument
301 for (buffer = q->num_buffers - buffers; buffer < q->num_buffers; in __vb2_queue_free()
303 if (q->bufs[buffer] == NULL) in __vb2_queue_free()
305 if (q->bufs[buffer]->state == VB2_BUF_STATE_PREPARING) { in __vb2_queue_free()
312 for (buffer = q->num_buffers - buffers; buffer < q->num_buffers; in __vb2_queue_free()
314 struct vb2_buffer *vb = q->bufs[buffer]; in __vb2_queue_free()
321 __vb2_free_mem(q, buffers); in __vb2_queue_free()
329 if (q->num_buffers) { in __vb2_queue_free()
330 bool unbalanced = q->cnt_start_streaming != q->cnt_stop_streaming || in __vb2_queue_free()
331 q->cnt_wait_prepare != q->cnt_wait_finish; in __vb2_queue_free()
334 pr_info("vb2: counters for queue %p:%s\n", q, in __vb2_queue_free()
337 q->cnt_queue_setup, q->cnt_start_streaming, in __vb2_queue_free()
338 q->cnt_stop_streaming); in __vb2_queue_free()
340 q->cnt_wait_prepare, q->cnt_wait_finish); in __vb2_queue_free()
342 q->cnt_queue_setup = 0; in __vb2_queue_free()
343 q->cnt_wait_prepare = 0; in __vb2_queue_free()
344 q->cnt_wait_finish = 0; in __vb2_queue_free()
345 q->cnt_start_streaming = 0; in __vb2_queue_free()
346 q->cnt_stop_streaming = 0; in __vb2_queue_free()
348 for (buffer = 0; buffer < q->num_buffers; ++buffer) { in __vb2_queue_free()
349 struct vb2_buffer *vb = q->bufs[buffer]; in __vb2_queue_free()
361 q, buffer, unbalanced ? " UNBALANCED!" : ""); in __vb2_queue_free()
386 for (buffer = q->num_buffers - buffers; buffer < q->num_buffers; in __vb2_queue_free()
388 kfree(q->bufs[buffer]); in __vb2_queue_free()
389 q->bufs[buffer] = NULL; in __vb2_queue_free()
392 q->num_buffers -= buffers; in __vb2_queue_free()
393 if (!q->num_buffers) { in __vb2_queue_free()
394 q->memory = 0; in __vb2_queue_free()
395 INIT_LIST_HEAD(&q->queued_list); in __vb2_queue_free()
404 bool vb2_buffer_in_use(struct vb2_queue *q, struct vb2_buffer *vb) in vb2_buffer_in_use() argument
426 static bool __buffers_in_use(struct vb2_queue *q) in __buffers_in_use() argument
429 for (buffer = 0; buffer < q->num_buffers; ++buffer) { in __buffers_in_use()
430 if (vb2_buffer_in_use(q, q->bufs[buffer])) in __buffers_in_use()
449 int vb2_core_querybuf(struct vb2_queue *q, unsigned int index, void *pb) in vb2_core_querybuf() argument
451 return call_bufop(q, fill_user_buffer, q->bufs[index], pb); in vb2_core_querybuf()
459 static int __verify_userptr_ops(struct vb2_queue *q) in __verify_userptr_ops() argument
461 if (!(q->io_modes & VB2_USERPTR) || !q->mem_ops->get_userptr || in __verify_userptr_ops()
462 !q->mem_ops->put_userptr) in __verify_userptr_ops()
472 static int __verify_mmap_ops(struct vb2_queue *q) in __verify_mmap_ops() argument
474 if (!(q->io_modes & VB2_MMAP) || !q->mem_ops->alloc || in __verify_mmap_ops()
475 !q->mem_ops->put || !q->mem_ops->mmap) in __verify_mmap_ops()
485 static int __verify_dmabuf_ops(struct vb2_queue *q) in __verify_dmabuf_ops() argument
487 if (!(q->io_modes & VB2_DMABUF) || !q->mem_ops->attach_dmabuf || in __verify_dmabuf_ops()
488 !q->mem_ops->detach_dmabuf || !q->mem_ops->map_dmabuf || in __verify_dmabuf_ops()
489 !q->mem_ops->unmap_dmabuf) in __verify_dmabuf_ops()
499 int vb2_verify_memory_type(struct vb2_queue *q, in vb2_verify_memory_type() argument
508 if (type != q->type) { in vb2_verify_memory_type()
517 if (memory == VB2_MEMORY_MMAP && __verify_mmap_ops(q)) { in vb2_verify_memory_type()
522 if (memory == VB2_MEMORY_USERPTR && __verify_userptr_ops(q)) { in vb2_verify_memory_type()
527 if (memory == VB2_MEMORY_DMABUF && __verify_dmabuf_ops(q)) { in vb2_verify_memory_type()
537 if (vb2_fileio_is_active(q)) { in vb2_verify_memory_type()
569 int vb2_core_reqbufs(struct vb2_queue *q, enum vb2_memory memory, in vb2_core_reqbufs() argument
575 if (q->streaming) { in vb2_core_reqbufs()
580 if (*count == 0 || q->num_buffers != 0 || q->memory != memory) { in vb2_core_reqbufs()
585 mutex_lock(&q->mmap_lock); in vb2_core_reqbufs()
586 if (q->memory == VB2_MEMORY_MMAP && __buffers_in_use(q)) { in vb2_core_reqbufs()
587 mutex_unlock(&q->mmap_lock); in vb2_core_reqbufs()
597 __vb2_queue_cancel(q); in vb2_core_reqbufs()
598 ret = __vb2_queue_free(q, q->num_buffers); in vb2_core_reqbufs()
599 mutex_unlock(&q->mmap_lock); in vb2_core_reqbufs()
615 num_buffers = max_t(unsigned int, num_buffers, q->min_buffers_needed); in vb2_core_reqbufs()
616 memset(q->plane_sizes, 0, sizeof(q->plane_sizes)); in vb2_core_reqbufs()
617 memset(q->alloc_ctx, 0, sizeof(q->alloc_ctx)); in vb2_core_reqbufs()
618 q->memory = memory; in vb2_core_reqbufs()
624 ret = call_qop(q, queue_setup, q, NULL, &num_buffers, &num_planes, in vb2_core_reqbufs()
625 q->plane_sizes, q->alloc_ctx); in vb2_core_reqbufs()
631 __vb2_queue_alloc(q, memory, num_buffers, num_planes); in vb2_core_reqbufs()
641 if (allocated_buffers < q->min_buffers_needed) in vb2_core_reqbufs()
650 ret = call_qop(q, queue_setup, q, NULL, &num_buffers, in vb2_core_reqbufs()
651 &num_planes, q->plane_sizes, q->alloc_ctx); in vb2_core_reqbufs()
662 mutex_lock(&q->mmap_lock); in vb2_core_reqbufs()
663 q->num_buffers = allocated_buffers; in vb2_core_reqbufs()
670 __vb2_queue_free(q, allocated_buffers); in vb2_core_reqbufs()
671 mutex_unlock(&q->mmap_lock); in vb2_core_reqbufs()
674 mutex_unlock(&q->mmap_lock); in vb2_core_reqbufs()
681 q->waiting_for_buffers = !q->is_output; in vb2_core_reqbufs()
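
Note: vb2_core_reqbufs() drives the driver's queue_setup op (the call_qop() calls at lines 624 and 650): once to negotiate the buffer count and plane sizes, and a second time only if fewer buffers could be allocated than requested. A minimal single-planar sketch of the matching callback for this kernel generation (the const void *parg parameter mirrors the call sites above; all drv_* names are hypothetical):

    static int drv_queue_setup(struct vb2_queue *vq, const void *parg,
                               unsigned int *nbuffers, unsigned int *nplanes,
                               unsigned int sizes[], void *alloc_ctxs[])
    {
            struct drv_dev *dev = vb2_get_drv_priv(vq);

            /* Re-negotiation: keep the sizes chosen on the first call. */
            if (*nplanes)
                    return sizes[0] < dev->frame_size ? -EINVAL : 0;

            *nplanes = 1;
            sizes[0] = dev->frame_size;     /* sizes[] is q->plane_sizes[] */
            alloc_ctxs[0] = dev->alloc_ctx; /* alloc_ctxs[] is q->alloc_ctx[] */
            return 0;
    }
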
703 int vb2_core_create_bufs(struct vb2_queue *q, enum vb2_memory memory, in vb2_core_create_bufs() argument
709 if (q->num_buffers == VB2_MAX_FRAME) { in vb2_core_create_bufs()
714 if (!q->num_buffers) { in vb2_core_create_bufs()
715 memset(q->plane_sizes, 0, sizeof(q->plane_sizes)); in vb2_core_create_bufs()
716 memset(q->alloc_ctx, 0, sizeof(q->alloc_ctx)); in vb2_core_create_bufs()
717 q->memory = memory; in vb2_core_create_bufs()
718 q->waiting_for_buffers = !q->is_output; in vb2_core_create_bufs()
721 num_buffers = min(*count, VB2_MAX_FRAME - q->num_buffers); in vb2_core_create_bufs()
727 ret = call_qop(q, queue_setup, q, parg, &num_buffers, in vb2_core_create_bufs()
728 &num_planes, q->plane_sizes, q->alloc_ctx); in vb2_core_create_bufs()
733 allocated_buffers = __vb2_queue_alloc(q, memory, num_buffers, in vb2_core_create_bufs()
750 ret = call_qop(q, queue_setup, q, parg, &num_buffers, in vb2_core_create_bufs()
751 &num_planes, q->plane_sizes, q->alloc_ctx); in vb2_core_create_bufs()
762 mutex_lock(&q->mmap_lock); in vb2_core_create_bufs()
763 q->num_buffers += allocated_buffers; in vb2_core_create_bufs()
770 __vb2_queue_free(q, allocated_buffers); in vb2_core_create_bufs()
771 mutex_unlock(&q->mmap_lock); in vb2_core_create_bufs()
774 mutex_unlock(&q->mmap_lock); in vb2_core_create_bufs()
846 struct vb2_queue *q = vb->vb2_queue; in vb2_buffer_done() local
873 spin_lock_irqsave(&q->done_lock, flags); in vb2_buffer_done()
879 list_add_tail(&vb->done_entry, &q->done_list); in vb2_buffer_done()
882 atomic_dec(&q->owned_by_drv_count); in vb2_buffer_done()
883 spin_unlock_irqrestore(&q->done_lock, flags); in vb2_buffer_done()
885 trace_vb2_buf_done(q, vb); in vb2_buffer_done()
891 if (q->start_streaming_called) in vb2_buffer_done()
896 wake_up(&q->done_wq); in vb2_buffer_done()
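
Note: vb2_buffer_done() is the driver's completion hook: under q->done_lock it moves the buffer onto q->done_list, drops owned_by_drv_count and wakes q->done_wq, so it is safe to call from interrupt context. A hedged sketch of the usual call site (the drv_* bookkeeping is hypothetical):

    #include <linux/interrupt.h>
    #include <media/videobuf2-core.h>

    static irqreturn_t drv_irq(int irq, void *priv)
    {
            struct drv_dev *dev = priv;
            struct vb2_buffer *vb = dev->current_buf;  /* buffer DMA just filled */

            vb2_set_plane_payload(vb, 0, dev->frame_size);
            vb2_buffer_done(vb, VB2_BUF_STATE_DONE);   /* or VB2_BUF_STATE_ERROR */
            return IRQ_HANDLED;
    }
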
914 void vb2_discard_done(struct vb2_queue *q) in vb2_discard_done() argument
919 spin_lock_irqsave(&q->done_lock, flags); in vb2_discard_done()
920 list_for_each_entry(vb, &q->done_list, done_entry) in vb2_discard_done()
922 spin_unlock_irqrestore(&q->done_lock, flags); in vb2_discard_done()
942 struct vb2_queue *q = vb->vb2_queue; in __qbuf_userptr() local
947 q->is_output ? DMA_TO_DEVICE : DMA_FROM_DEVICE; in __qbuf_userptr()
967 if (planes[plane].length < q->plane_sizes[plane]) { in __qbuf_userptr()
971 q->plane_sizes[plane], plane); in __qbuf_userptr()
992 mem_priv = call_ptr_memop(vb, get_userptr, q->alloc_ctx[plane], in __qbuf_userptr()
1056 struct vb2_queue *q = vb->vb2_queue; in __qbuf_dmabuf() local
1061 q->is_output ? DMA_TO_DEVICE : DMA_FROM_DEVICE; in __qbuf_dmabuf()
1084 if (planes[plane].length < q->plane_sizes[plane]) { in __qbuf_dmabuf()
1114 q->alloc_ctx[plane], dbuf, planes[plane].length, in __qbuf_dmabuf()
1184 struct vb2_queue *q = vb->vb2_queue; in __enqueue_in_driver() local
1188 atomic_inc(&q->owned_by_drv_count); in __enqueue_in_driver()
1190 trace_vb2_buf_queue(q, vb); in __enqueue_in_driver()
1201 struct vb2_queue *q = vb->vb2_queue; in __buf_prepare() local
1204 if (q->error) { in __buf_prepare()
1211 switch (q->memory) { in __buf_prepare()
1249 int vb2_core_prepare_buf(struct vb2_queue *q, unsigned int index, void *pb) in vb2_core_prepare_buf() argument
1254 vb = q->bufs[index]; in vb2_core_prepare_buf()
1266 ret = call_bufop(q, fill_user_buffer, vb, pb); in vb2_core_prepare_buf()
1287 static int vb2_start_streaming(struct vb2_queue *q) in vb2_start_streaming() argument
1296 list_for_each_entry(vb, &q->queued_list, queued_entry) in vb2_start_streaming()
1300 q->start_streaming_called = 1; in vb2_start_streaming()
1301 ret = call_qop(q, start_streaming, q, in vb2_start_streaming()
1302 atomic_read(&q->owned_by_drv_count)); in vb2_start_streaming()
1306 q->start_streaming_called = 0; in vb2_start_streaming()
1315 if (WARN_ON(atomic_read(&q->owned_by_drv_count))) { in vb2_start_streaming()
1322 for (i = 0; i < q->num_buffers; ++i) { in vb2_start_streaming()
1323 vb = q->bufs[i]; in vb2_start_streaming()
1328 WARN_ON(atomic_read(&q->owned_by_drv_count)); in vb2_start_streaming()
1335 WARN_ON(!list_empty(&q->done_list)); in vb2_start_streaming()
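
Note: vb2_start_streaming() first hands every already-queued buffer to the driver via __enqueue_in_driver(), then calls the start_streaming op with the count of driver-owned buffers; if the op fails, the core insists (the WARN_ON at line 1315) that the driver has given them all back in the QUEUED state. A sketch of a conforming callback, with a hypothetical per-buffer wrapper that later sketches reuse:

    struct drv_buf {                       /* hypothetical; a V4L2 driver would
                                            * embed struct vb2_v4l2_buffer */
            struct vb2_buffer vb;          /* must be the first member */
            struct list_head list;
    };

    static int drv_start_streaming(struct vb2_queue *vq, unsigned int count)
    {
            struct drv_dev *dev = vb2_get_drv_priv(vq);
            int ret;

            ret = drv_hw_start(dev);       /* hypothetical hardware kick */
            if (ret) {
                    struct drv_buf *buf, *tmp;

                    /* Return every queued buffer, as the core requires. */
                    list_for_each_entry_safe(buf, tmp, &dev->buf_list, list) {
                            list_del(&buf->list);
                            vb2_buffer_done(&buf->vb, VB2_BUF_STATE_QUEUED);
                    }
            }
            return ret;
    }
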
1357 int vb2_core_qbuf(struct vb2_queue *q, unsigned int index, void *pb) in vb2_core_qbuf() argument
1362 vb = q->bufs[index]; in vb2_core_qbuf()
1384 list_add_tail(&vb->queued_entry, &q->queued_list); in vb2_core_qbuf()
1385 q->queued_count++; in vb2_core_qbuf()
1386 q->waiting_for_buffers = false; in vb2_core_qbuf()
1389 call_bufop(q, set_timestamp, vb, pb); in vb2_core_qbuf()
1391 trace_vb2_qbuf(q, vb); in vb2_core_qbuf()
1397 if (q->start_streaming_called) in vb2_core_qbuf()
1401 ret = call_bufop(q, fill_user_buffer, vb, pb); in vb2_core_qbuf()
1411 if (q->streaming && !q->start_streaming_called && in vb2_core_qbuf()
1412 q->queued_count >= q->min_buffers_needed) { in vb2_core_qbuf()
1413 ret = vb2_start_streaming(q); in vb2_core_qbuf()
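
Note: once a buffer passes __buf_prepare(), vb2_core_qbuf() appends it to q->queued_list and, if streaming has started, __enqueue_in_driver() invokes the driver's buf_queue op. That op typically just parks the buffer on a driver-private list for the interrupt handler to consume (reusing the hypothetical drv_buf wrapper sketched above):

    static void drv_buf_queue(struct vb2_buffer *vb)
    {
            struct drv_dev *dev = vb2_get_drv_priv(vb->vb2_queue);
            struct drv_buf *buf = container_of(vb, struct drv_buf, vb);
            unsigned long flags;

            spin_lock_irqsave(&dev->slock, flags);
            list_add_tail(&buf->list, &dev->buf_list);
            spin_unlock_irqrestore(&dev->slock, flags);
    }
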
1429 static int __vb2_wait_for_done_vb(struct vb2_queue *q, int nonblocking) in __vb2_wait_for_done_vb() argument
1443 if (!q->streaming) { in __vb2_wait_for_done_vb()
1448 if (q->error) { in __vb2_wait_for_done_vb()
1453 if (q->last_buffer_dequeued) { in __vb2_wait_for_done_vb()
1458 if (!list_empty(&q->done_list)) { in __vb2_wait_for_done_vb()
1476 call_void_qop(q, wait_prepare, q); in __vb2_wait_for_done_vb()
1482 ret = wait_event_interruptible(q->done_wq, in __vb2_wait_for_done_vb()
1483 !list_empty(&q->done_list) || !q->streaming || in __vb2_wait_for_done_vb()
1484 q->error); in __vb2_wait_for_done_vb()
1490 call_void_qop(q, wait_finish, q); in __vb2_wait_for_done_vb()
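
Note: the wait_prepare/wait_finish pair exists so that __vb2_wait_for_done_vb() can drop the driver's serialization lock around the interruptible sleep; otherwise a blocking DQBUF would deadlock a QBUF issued from another thread. Most drivers point these ops at the stock vb2_ops_wait_prepare()/vb2_ops_wait_finish() helpers, whose bodies amount to:

    static void drv_wait_prepare(struct vb2_queue *vq)
    {
            mutex_unlock(vq->lock);        /* q->lock: the driver's main mutex */
    }

    static void drv_wait_finish(struct vb2_queue *vq)
    {
            mutex_lock(vq->lock);
    }
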
1504 static int __vb2_get_done_vb(struct vb2_queue *q, struct vb2_buffer **vb, in __vb2_get_done_vb() argument
1513 ret = __vb2_wait_for_done_vb(q, nonblocking); in __vb2_get_done_vb()
1521 spin_lock_irqsave(&q->done_lock, flags); in __vb2_get_done_vb()
1522 *vb = list_first_entry(&q->done_list, struct vb2_buffer, done_entry); in __vb2_get_done_vb()
1527 ret = call_bufop(q, verify_planes_array, *vb, pb); in __vb2_get_done_vb()
1530 spin_unlock_irqrestore(&q->done_lock, flags); in __vb2_get_done_vb()
1544 int vb2_wait_for_all_buffers(struct vb2_queue *q) in vb2_wait_for_all_buffers() argument
1546 if (!q->streaming) { in vb2_wait_for_all_buffers()
1551 if (q->start_streaming_called) in vb2_wait_for_all_buffers()
1552 wait_event(q->done_wq, !atomic_read(&q->owned_by_drv_count)); in vb2_wait_for_all_buffers()
1562 struct vb2_queue *q = vb->vb2_queue; in __vb2_dqbuf() local
1572 if (q->memory == VB2_MEMORY_DMABUF) in __vb2_dqbuf()
1602 int vb2_core_dqbuf(struct vb2_queue *q, void *pb, bool nonblocking) in vb2_core_dqbuf() argument
1607 ret = __vb2_get_done_vb(q, &vb, pb, nonblocking); in vb2_core_dqbuf()
1626 ret = call_bufop(q, fill_user_buffer, vb, pb); in vb2_core_dqbuf()
1632 q->queued_count--; in vb2_core_dqbuf()
1634 trace_vb2_dqbuf(q, vb); in vb2_core_dqbuf()
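
Note: from userspace, vb2_core_qbuf()/vb2_core_dqbuf() are reached through the VIDIOC_QBUF/VIDIOC_DQBUF ioctls; a blocking DQBUF parks in __vb2_wait_for_done_vb() until vb2_buffer_done() wakes q->done_wq. A minimal single-planar sketch (assumes an open capture device with buffers already requested and mmap()ed):

    #include <linux/videodev2.h>
    #include <sys/ioctl.h>
    #include <string.h>

    static int capture_one(int fd, unsigned int index)
    {
            struct v4l2_buffer buf;

            memset(&buf, 0, sizeof(buf));
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = index;

            if (ioctl(fd, VIDIOC_QBUF, &buf))   /* -> vb2_core_qbuf() */
                    return -1;
            if (ioctl(fd, VIDIOC_DQBUF, &buf))  /* -> vb2_core_dqbuf(); blocks */
                    return -1;
            return buf.bytesused;               /* payload set by the driver */
    }
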
1653 static void __vb2_queue_cancel(struct vb2_queue *q) in __vb2_queue_cancel() argument
1661 if (q->start_streaming_called) in __vb2_queue_cancel()
1662 call_void_qop(q, stop_streaming, q); in __vb2_queue_cancel()
1670 if (WARN_ON(atomic_read(&q->owned_by_drv_count))) { in __vb2_queue_cancel()
1671 for (i = 0; i < q->num_buffers; ++i) in __vb2_queue_cancel()
1672 if (q->bufs[i]->state == VB2_BUF_STATE_ACTIVE) in __vb2_queue_cancel()
1673 vb2_buffer_done(q->bufs[i], VB2_BUF_STATE_ERROR); in __vb2_queue_cancel()
1675 WARN_ON(atomic_read(&q->owned_by_drv_count)); in __vb2_queue_cancel()
1678 q->streaming = 0; in __vb2_queue_cancel()
1679 q->start_streaming_called = 0; in __vb2_queue_cancel()
1680 q->queued_count = 0; in __vb2_queue_cancel()
1681 q->error = 0; in __vb2_queue_cancel()
1686 INIT_LIST_HEAD(&q->queued_list); in __vb2_queue_cancel()
1691 INIT_LIST_HEAD(&q->done_list); in __vb2_queue_cancel()
1692 atomic_set(&q->owned_by_drv_count, 0); in __vb2_queue_cancel()
1693 wake_up_all(&q->done_wq); in __vb2_queue_cancel()
1704 for (i = 0; i < q->num_buffers; ++i) { in __vb2_queue_cancel()
1705 struct vb2_buffer *vb = q->bufs[i]; in __vb2_queue_cancel()
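
Note: __vb2_queue_cancel() calls the driver's stop_streaming op and then WARNs (line 1670) if the driver still owns buffers, so the op must halt the hardware and hand back every in-flight buffer in the ERROR state. A sketch, again reusing the hypothetical drv_buf wrapper:

    static void drv_stop_streaming(struct vb2_queue *vq)
    {
            struct drv_dev *dev = vb2_get_drv_priv(vq);
            struct drv_buf *buf, *tmp;

            drv_hw_stop(dev);                  /* hypothetical: stop DMA first */

            list_for_each_entry_safe(buf, tmp, &dev->buf_list, list) {
                    list_del(&buf->list);
                    vb2_buffer_done(&buf->vb, VB2_BUF_STATE_ERROR);
            }
    }
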
1715 int vb2_core_streamon(struct vb2_queue *q, unsigned int type) in vb2_core_streamon() argument
1719 if (type != q->type) { in vb2_core_streamon()
1724 if (q->streaming) { in vb2_core_streamon()
1729 if (!q->num_buffers) { in vb2_core_streamon()
1734 if (q->num_buffers < q->min_buffers_needed) { in vb2_core_streamon()
1736 q->min_buffers_needed); in vb2_core_streamon()
1744 if (q->queued_count >= q->min_buffers_needed) { in vb2_core_streamon()
1745 ret = vb2_start_streaming(q); in vb2_core_streamon()
1747 __vb2_queue_cancel(q); in vb2_core_streamon()
1752 q->streaming = 1; in vb2_core_streamon()
1772 void vb2_queue_error(struct vb2_queue *q) in vb2_queue_error() argument
1774 q->error = 1; in vb2_queue_error()
1776 wake_up_all(&q->done_wq); in vb2_queue_error()
1780 int vb2_core_streamoff(struct vb2_queue *q, unsigned int type) in vb2_core_streamoff() argument
1782 if (type != q->type) { in vb2_core_streamoff()
1796 __vb2_queue_cancel(q); in vb2_core_streamoff()
1797 q->waiting_for_buffers = !q->is_output; in vb2_core_streamoff()
1798 q->last_buffer_dequeued = false; in vb2_core_streamoff()
1808 static int __find_plane_by_offset(struct vb2_queue *q, unsigned long off, in __find_plane_by_offset() argument
1819 for (buffer = 0; buffer < q->num_buffers; ++buffer) { in __find_plane_by_offset()
1820 vb = q->bufs[buffer]; in __find_plane_by_offset()
1847 int vb2_core_expbuf(struct vb2_queue *q, int *fd, unsigned int type, in vb2_core_expbuf() argument
1855 if (q->memory != VB2_MEMORY_MMAP) { in vb2_core_expbuf()
1860 if (!q->mem_ops->get_dmabuf) { in vb2_core_expbuf()
1870 if (type != q->type) { in vb2_core_expbuf()
1875 if (index >= q->num_buffers) { in vb2_core_expbuf()
1880 vb = q->bufs[index]; in vb2_core_expbuf()
1887 if (vb2_fileio_is_active(q)) { in vb2_core_expbuf()
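
Note: vb2_core_expbuf() only works on an MMAP queue whose mem_ops provide get_dmabuf (the checks at lines 1855 and 1860); it wraps one plane of one buffer in a dma-buf and hands userspace a file descriptor. The userspace side, via VIDIOC_EXPBUF:

    #include <linux/videodev2.h>
    #include <sys/ioctl.h>
    #include <fcntl.h>
    #include <string.h>

    static int export_plane(int fd, unsigned int index, unsigned int plane)
    {
            struct v4l2_exportbuffer exp;

            memset(&exp, 0, sizeof(exp));
            exp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            exp.index = index;
            exp.plane = plane;
            exp.flags = O_CLOEXEC;

            if (ioctl(fd, VIDIOC_EXPBUF, &exp)) /* -> vb2_core_expbuf() */
                    return -1;
            return exp.fd;  /* dma-buf fd, shareable with other devices */
    }
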
1937 int vb2_mmap(struct vb2_queue *q, struct vm_area_struct *vma) in vb2_mmap() argument
1945 if (q->memory != VB2_MEMORY_MMAP) { in vb2_mmap()
1957 if (q->is_output) { in vb2_mmap()
1968 if (vb2_fileio_is_active(q)) { in vb2_mmap()
1976 ret = __find_plane_by_offset(q, off, &buffer, &plane); in vb2_mmap()
1980 vb = q->bufs[buffer]; in vb2_mmap()
1994 mutex_lock(&q->mmap_lock); in vb2_mmap()
1996 mutex_unlock(&q->mmap_lock); in vb2_mmap()
2006 unsigned long vb2_get_unmapped_area(struct vb2_queue *q, in vb2_get_unmapped_area() argument
2018 if (q->memory != VB2_MEMORY_MMAP) { in vb2_get_unmapped_area()
2026 ret = __find_plane_by_offset(q, off, &buffer, &plane); in vb2_get_unmapped_area()
2030 vb = q->bufs[buffer]; in vb2_get_unmapped_area()
2049 int vb2_core_queue_init(struct vb2_queue *q) in vb2_core_queue_init() argument
2054 if (WARN_ON(!q) || in vb2_core_queue_init()
2055 WARN_ON(!q->ops) || in vb2_core_queue_init()
2056 WARN_ON(!q->mem_ops) || in vb2_core_queue_init()
2057 WARN_ON(!q->type) || in vb2_core_queue_init()
2058 WARN_ON(!q->io_modes) || in vb2_core_queue_init()
2059 WARN_ON(!q->ops->queue_setup) || in vb2_core_queue_init()
2060 WARN_ON(!q->ops->buf_queue)) in vb2_core_queue_init()
2063 INIT_LIST_HEAD(&q->queued_list); in vb2_core_queue_init()
2064 INIT_LIST_HEAD(&q->done_list); in vb2_core_queue_init()
2065 spin_lock_init(&q->done_lock); in vb2_core_queue_init()
2066 mutex_init(&q->mmap_lock); in vb2_core_queue_init()
2067 init_waitqueue_head(&q->done_wq); in vb2_core_queue_init()
2069 if (q->buf_struct_size == 0) in vb2_core_queue_init()
2070 q->buf_struct_size = sizeof(struct vb2_buffer); in vb2_core_queue_init()
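
Note: the WARN_ON() chain at lines 2054-2060 spells out the minimum contract: a queue needs type, io_modes, mem_ops, and at least queue_setup and buf_queue ops before init. A typical V4L2 driver fills the queue and calls the vb2_queue_init() wrapper, which ends up in vb2_core_queue_init() above (drv_* names hypothetical; vb2_dma_contig_memops is just one possible mem_ops provider):

    #include <media/videobuf2-v4l2.h>
    #include <media/videobuf2-dma-contig.h>

    static int drv_init_queue(struct drv_dev *dev)
    {
            struct vb2_queue *q = &dev->queue;

            q->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            q->io_modes = VB2_MMAP | VB2_DMABUF;
            q->drv_priv = dev;
            q->buf_struct_size = sizeof(struct drv_buf); /* wrapper, see above */
            q->ops = &drv_vb2_ops;           /* queue_setup/buf_queue et al. */
            q->mem_ops = &vb2_dma_contig_memops;
            q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
            q->min_buffers_needed = 2;       /* enforced by vb2_core_streamon() */
            q->lock = &dev->lock;            /* dropped by wait_prepare() */

            return vb2_queue_init(q);
    }
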
2084 void vb2_core_queue_release(struct vb2_queue *q) in vb2_core_queue_release() argument
2086 __vb2_queue_cancel(q); in vb2_core_queue_release()
2087 mutex_lock(&q->mmap_lock); in vb2_core_queue_release()
2088 __vb2_queue_free(q, q->num_buffers); in vb2_core_queue_release()
2089 mutex_unlock(&q->mmap_lock); in vb2_core_queue_release()