Lines Matching refs:vb
79 layer->cur_frm->vb.v4l2_buf.timestamp.tv_sec = in vpbe_isr_even_field()
81 layer->cur_frm->vb.v4l2_buf.timestamp.tv_usec = in vpbe_isr_even_field()
83 vb2_buffer_done(&layer->cur_frm->vb, VB2_BUF_STATE_DONE); in vpbe_isr_even_field()
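
The even-field lines above are the standard vb2 completion pattern: timestamp the frame that just finished and hand it back with vb2_buffer_done(). Below is a minimal sketch, assuming the pre-4.4 videobuf2 API (struct vb2_buffer embeds struct v4l2_buffer as v4l2_buf); the vpbe_disp_buffer and vpbe_layer layouts are simplified placeholders for illustration, not the driver's real definitions.

#include <linux/ktime.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/time.h>
#include <linux/videodev2.h>
#include <media/videobuf2-core.h>

struct vpbe_disp_buffer {
	struct vb2_buffer vb;		/* embedded vb2 buffer */
	struct list_head list;		/* entry on the layer's dma_queue */
};

struct vpbe_layer {
	struct vpbe_disp_buffer *cur_frm;	/* frame being displayed */
	struct vpbe_disp_buffer *next_frm;	/* frame scheduled next */
	struct list_head dma_queue;		/* buffers queued by userspace */
	spinlock_t irqlock;
	struct v4l2_pix_format pix_fmt;		/* negotiated format */
};

static void vpbe_isr_even_field_sketch(struct vpbe_layer *layer)
{
	struct timespec ts;

	/* Nothing to complete if no new frame has been scheduled yet. */
	if (layer->cur_frm == layer->next_frm)
		return;

	/* Timestamp the finished frame and return it to vb2 as DONE. */
	ktime_get_ts(&ts);
	layer->cur_frm->vb.v4l2_buf.timestamp.tv_sec = ts.tv_sec;
	layer->cur_frm->vb.v4l2_buf.timestamp.tv_usec =
		ts.tv_nsec / NSEC_PER_USEC;
	vb2_buffer_done(&layer->cur_frm->vb, VB2_BUF_STATE_DONE);

	layer->cur_frm = layer->next_frm;
}
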
112 layer->next_frm->vb.state = VB2_BUF_STATE_ACTIVE; in vpbe_isr_odd_field()
113 addr = vb2_dma_contig_plane_dma_addr(&layer->next_frm->vb, 0); in vpbe_isr_odd_field()
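
The odd-field lines complete the hand-over: once the current frame has been displayed, the next buffer queued by userspace is pulled off dma_queue, marked active, and its physical address is fetched for the hardware. A sketch under the same assumptions, reusing the placeholder structs above; write_layer_address() is a hypothetical stand-in for the driver's OSD programming call.

#include <media/videobuf2-dma-contig.h>

/* Hypothetical helper: program the display DMA start address. */
static void write_layer_address(struct vpbe_layer *layer, dma_addr_t addr)
{
	/* placeholder: write addr to the OSD layer registers here */
}

static void vpbe_isr_odd_field_sketch(struct vpbe_layer *layer)
{
	dma_addr_t addr;

	spin_lock(&layer->irqlock);
	if (list_empty(&layer->dma_queue) ||
	    layer->cur_frm != layer->next_frm) {
		spin_unlock(&layer->irqlock);
		return;
	}

	/* Take the next queued buffer and make it the pending frame. */
	layer->next_frm = list_first_entry(&layer->dma_queue,
					   struct vpbe_disp_buffer, list);
	list_del(&layer->next_frm->list);
	spin_unlock(&layer->irqlock);

	layer->next_frm->vb.state = VB2_BUF_STATE_ACTIVE;
	addr = vb2_dma_contig_plane_dma_addr(&layer->next_frm->vb, 0);
	write_layer_address(layer, addr);
}
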
208 static int vpbe_buffer_prepare(struct vb2_buffer *vb) in vpbe_buffer_prepare() argument
210 struct vb2_queue *q = vb->vb2_queue; in vpbe_buffer_prepare()
218 vb2_set_plane_payload(vb, 0, layer->pix_fmt.sizeimage); in vpbe_buffer_prepare()
219 if (vb2_get_plane_payload(vb, 0) > vb2_plane_size(vb, 0)) in vpbe_buffer_prepare()
222 addr = vb2_dma_contig_plane_dma_addr(vb, 0); in vpbe_buffer_prepare()
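
In .buf_prepare the driver fixes the payload to the negotiated image size, rejects buffers whose allocated plane is too small, and checks the DMA address it will later program into the hardware. A sketch with the same placeholder structs; the 8-byte alignment check is an illustrative assumption about the hardware constraint.

#include <linux/errno.h>
#include <linux/kernel.h>

static int vpbe_buffer_prepare_sketch(struct vb2_buffer *vb)
{
	struct vpbe_layer *layer = vb2_get_drv_priv(vb->vb2_queue);
	dma_addr_t addr;

	/* Payload is the full image; it must fit in the allocated plane. */
	vb2_set_plane_payload(vb, 0, layer->pix_fmt.sizeimage);
	if (vb2_get_plane_payload(vb, 0) > vb2_plane_size(vb, 0))
		return -EINVAL;

	/* Assumed constraint: the display engine wants 8-byte alignment. */
	addr = vb2_dma_contig_plane_dma_addr(vb, 0);
	if (!IS_ALIGNED(addr, 8))
		return -EINVAL;

	return 0;
}
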
265 static void vpbe_buffer_queue(struct vb2_buffer *vb) in vpbe_buffer_queue() argument
268 struct vpbe_disp_buffer *buf = container_of(vb, in vpbe_buffer_queue()
269 struct vpbe_disp_buffer, vb); in vpbe_buffer_queue()
270 struct vpbe_layer *layer = vb2_get_drv_priv(vb->vb2_queue); in vpbe_buffer_queue()
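
The .buf_queue lines show why vpbe_disp_buffer embeds the vb2_buffer: container_of() recovers the driver's wrapper from the vb2 pointer so the buffer can be linked onto the layer's DMA queue under the IRQ lock. A sketch with the same placeholder structs:

static void vpbe_buffer_queue_sketch(struct vb2_buffer *vb)
{
	struct vpbe_disp_buffer *buf =
		container_of(vb, struct vpbe_disp_buffer, vb);
	struct vpbe_layer *layer = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long flags;

	/* Append to the queue the ISR drains on the next field interrupt. */
	spin_lock_irqsave(&layer->irqlock, flags);
	list_add_tail(&buf->list, &layer->dma_queue);
	spin_unlock_irqrestore(&layer->irqlock, flags);
}
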
298 layer->cur_frm->vb.state = VB2_BUF_STATE_ACTIVE; in vpbe_start_streaming()
307 vb2_buffer_done(&layer->cur_frm->vb, VB2_BUF_STATE_QUEUED); in vpbe_start_streaming()
310 vb2_buffer_done(&buf->vb, VB2_BUF_STATE_QUEUED); in vpbe_start_streaming()
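
On .start_streaming the first queued buffer becomes the active frame; if enabling the display hardware fails, that buffer and everything still on dma_queue are handed back to the vb2 core in the QUEUED state, which is what the vb2_buffer_done(..., VB2_BUF_STATE_QUEUED) calls above do. A sketch under the same assumptions; enable_display_hw() is a hypothetical stand-in for the driver's OSD/VENC setup.

/* Hypothetical helper: enable the OSD layer and video encoder. */
static int enable_display_hw(struct vpbe_layer *layer)
{
	return 0;	/* placeholder */
}

static int vpbe_start_streaming_sketch(struct vb2_queue *vq, unsigned int count)
{
	struct vpbe_layer *layer = vb2_get_drv_priv(vq);
	struct vpbe_disp_buffer *buf, *tmp;
	int ret;

	/* The first queued buffer becomes both the current and next frame. */
	layer->cur_frm = list_first_entry(&layer->dma_queue,
					  struct vpbe_disp_buffer, list);
	layer->next_frm = layer->cur_frm;
	list_del(&layer->cur_frm->list);
	layer->cur_frm->vb.state = VB2_BUF_STATE_ACTIVE;

	ret = enable_display_hw(layer);
	if (ret) {
		/* Return every buffer to vb2 as QUEUED on failure. */
		vb2_buffer_done(&layer->cur_frm->vb, VB2_BUF_STATE_QUEUED);
		list_for_each_entry_safe(buf, tmp, &layer->dma_queue, list) {
			list_del(&buf->list);
			vb2_buffer_done(&buf->vb, VB2_BUF_STATE_QUEUED);
		}
	}
	return ret;
}
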
340 vb2_buffer_done(&layer->cur_frm->vb, VB2_BUF_STATE_ERROR); in vpbe_stop_streaming()
343 vb2_buffer_done(&layer->cur_frm->vb, in vpbe_stop_streaming()
346 vb2_buffer_done(&layer->next_frm->vb, in vpbe_stop_streaming()
354 vb2_buffer_done(&layer->next_frm->vb, VB2_BUF_STATE_ERROR); in vpbe_stop_streaming()
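
On .stop_streaming every buffer still owned by the driver has to be released back to vb2: the in-flight frames and anything left on dma_queue are completed with VB2_BUF_STATE_ERROR since they were never displayed, and the cur_frm == next_frm guard avoids completing the same buffer twice. A sketch with the same placeholder structs:

static void vpbe_stop_streaming_sketch(struct vb2_queue *vq)
{
	struct vpbe_layer *layer = vb2_get_drv_priv(vq);
	unsigned long flags;

	spin_lock_irqsave(&layer->irqlock, flags);

	if (layer->cur_frm == layer->next_frm) {
		vb2_buffer_done(&layer->cur_frm->vb, VB2_BUF_STATE_ERROR);
	} else {
		if (layer->cur_frm)
			vb2_buffer_done(&layer->cur_frm->vb,
					VB2_BUF_STATE_ERROR);
		if (layer->next_frm)
			vb2_buffer_done(&layer->next_frm->vb,
					VB2_BUF_STATE_ERROR);
	}

	/* Flush whatever userspace queued but the ISR never picked up. */
	while (!list_empty(&layer->dma_queue)) {
		struct vpbe_disp_buffer *buf =
			list_first_entry(&layer->dma_queue,
					 struct vpbe_disp_buffer, list);

		list_del(&buf->list);
		vb2_buffer_done(&buf->vb, VB2_BUF_STATE_ERROR);
	}

	spin_unlock_irqrestore(&layer->irqlock, flags);
}
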
391 addr = vb2_dma_contig_plane_dma_addr(&layer->cur_frm->vb, 0); in vpbe_set_osd_display_params()
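
Finally, when the display parameters are programmed at stream start, the physical address of the active frame is fetched the same way and passed to the OSD layer before it is enabled. A short sketch, reusing the hypothetical write_layer_address() helper from the odd-field sketch:

static void vpbe_set_osd_display_params_sketch(struct vpbe_layer *layer)
{
	dma_addr_t addr;

	addr = vb2_dma_contig_plane_dma_addr(&layer->cur_frm->vb, 0);
	write_layer_address(layer, addr);	/* placeholder OSD call */
}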