Lines matching refs: buf
48 struct cx88_buffer *buf) in cx8800_start_vbi_dma() argument
54 VBI_LINE_LENGTH, buf->risc.dma); in cx8800_start_vbi_dma()
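The two matches above are from cx8800_start_vbi_dma(): buf arrives as an argument and buf->risc.dma is used as the DMA base address when the VBI channel is programmed. A minimal sketch of that context, recalled from the mainline cx88 driver (identifiers not visible in the matches, such as cx88_sram_channel_setup() and SRAM_CH24, are assumptions and may differ by kernel version):

static int cx8800_start_vbi_dma(struct cx8800_dev *dev,
				struct cx88_dmaqueue *q,
				struct cx88_buffer *buf)
{
	/* Point the VBI SRAM channel at this buffer's RISC program. */
	cx88_sram_channel_setup(dev->core, &cx88_sram_channels[SRAM_CH24],
				VBI_LINE_LENGTH, buf->risc.dma);

	/* ... enable VBI capture, reset the counter, unmask IRQs, start DMA ... */
	return 0;
}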
96 struct cx88_buffer *buf; in cx8800_restart_vbi_queue() local
101 buf = list_entry(q->active.next, struct cx88_buffer, list); in cx8800_restart_vbi_queue()
103 buf, buf->vb.vb2_buf.index); in cx8800_restart_vbi_queue()
104 cx8800_start_vbi_dma(dev, q, buf); in cx8800_restart_vbi_queue()
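The cx8800_restart_vbi_queue() matches show buf as a local: the oldest entry on the active list is picked and DMA is restarted from it. A hedged sketch of that path (the dprintk format string is not visible in the matches and is approximated):

int cx8800_restart_vbi_queue(struct cx8800_dev *dev, struct cx88_dmaqueue *q)
{
	struct cx88_buffer *buf;

	if (list_empty(&q->active))
		return 0;

	/* Restart DMA from the head of the active queue. */
	buf = list_entry(q->active.next, struct cx88_buffer, list);
	dprintk(2, "restart_queue [%p/%d]: restart dma\n",
		buf, buf->vb.vb2_buf.index);
	cx8800_start_vbi_dma(dev, q, buf);
	return 0;
}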
130 struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb); in buffer_prepare() local
144 cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl, in buffer_prepare()
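In buffer_prepare(), buf is recovered from the vb2 buffer via container_of() and its RISC DMA program is built over the buffer's scatter list. A sketch with the size checks trimmed (the line-count selection and the cx88_risc_buffer() argument order are recalled from the mainline driver, not from the matches):

static int buffer_prepare(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb);
	struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0);
	unsigned int lines = (dev->core->tvnorm & V4L2_STD_525_60) ?
			     VBI_LINE_NTSC_COUNT : VBI_LINE_PAL_COUNT;

	/* Build a RISC program that DMAs 'lines' VBI lines for both fields. */
	cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl,
			 0, VBI_LINE_LENGTH * lines,
			 VBI_LINE_LENGTH, 0, lines);
	return 0;
}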
155 struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb); in buffer_finish() local
156 struct cx88_riscmem *risc = &buf->risc; in buffer_finish()
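buffer_finish() only needs buf to reach the per-buffer cx88_riscmem so the RISC program memory can be released. A sketch (the dma_free_coherent() call is an assumption; older kernels used the pci_* consistent-memory helpers here):

static void buffer_finish(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb);
	struct cx88_riscmem *risc = &buf->risc;

	/* Free the coherent memory holding this buffer's RISC program. */
	if (risc->cpu)
		dma_free_coherent(&dev->pci->dev, risc->size, risc->cpu,
				  risc->dma);
	memset(risc, 0, sizeof(*risc));
}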
167 struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb); in buffer_queue() local
172 buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8); in buffer_queue()
173 buf->risc.jmp[0] = cpu_to_le32(RISC_JUMP | RISC_CNT_INC); in buffer_queue()
174 buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8); in buffer_queue()
177 list_add_tail(&buf->list, &q->active); in buffer_queue()
178 cx8800_start_vbi_dma(dev, q, buf); in buffer_queue()
180 buf, buf->vb.vb2_buf.index); in buffer_queue()
183 buf->risc.cpu[0] |= cpu_to_le32(RISC_IRQ1); in buffer_queue()
185 list_add_tail(&buf->list, &q->active); in buffer_queue()
186 prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma); in buffer_queue()
188 buf, buf->vb.vb2_buf.index); in buffer_queue()
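The buffer_queue() matches are the core of the chaining scheme: every buffer's RISC program ends in a jump back to its own start, and queueing either starts DMA immediately (empty queue) or patches the previous tail buffer's jump to fall through into the new buffer. A sketch of both branches with the debug prints dropped (the dev->vbiq queue name and the prev lookup are recalled from the mainline driver):

static void buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct cx8800_dev *dev = vb->vb2_queue->drv_priv;
	struct cx88_buffer *buf = container_of(vbuf, struct cx88_buffer, vb);
	struct cx88_buffer *prev;
	struct cx88_dmaqueue *q = &dev->vbiq;

	/* Terminate this buffer's RISC program with a jump to its own start. */
	buf->risc.cpu[1] = cpu_to_le32(buf->risc.dma + 8);
	buf->risc.jmp[0] = cpu_to_le32(RISC_JUMP | RISC_CNT_INC);
	buf->risc.jmp[1] = cpu_to_le32(buf->risc.dma + 8);

	if (list_empty(&q->active)) {
		/* First buffer: start VBI DMA right away. */
		list_add_tail(&buf->list, &q->active);
		cx8800_start_vbi_dma(dev, q, buf);
	} else {
		/* Append: request an IRQ at this buffer and splice it in by
		 * rewriting the previous tail's jump address.
		 */
		buf->risc.cpu[0] |= cpu_to_le32(RISC_IRQ1);
		prev = list_entry(q->active.prev, struct cx88_buffer, list);
		list_add_tail(&buf->list, &q->active);
		prev->risc.jmp[1] = cpu_to_le32(buf->risc.dma);
	}
}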
196 struct cx88_buffer *buf = list_entry(dmaq->active.next, in start_streaming() local
199 cx8800_start_vbi_dma(dev, dmaq, buf); in start_streaming()
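start_streaming() simply kicks off DMA from the first buffer already queued on the active list, as the two matches above show; a short sketch of the wrapper (the drv_priv and dev->vbiq plumbing is assumed from the mainline driver):

static int start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct cx8800_dev *dev = q->drv_priv;
	struct cx88_dmaqueue *dmaq = &dev->vbiq;
	struct cx88_buffer *buf = list_entry(dmaq->active.next,
					     struct cx88_buffer, list);

	/* Start VBI DMA from the head of the active queue. */
	cx8800_start_vbi_dma(dev, dmaq, buf);
	return 0;
}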
215 struct cx88_buffer *buf = list_entry(dmaq->active.next, in stop_streaming() local
218 list_del(&buf->list); in stop_streaming()
219 vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR); in stop_streaming()
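stop_streaming() drains whatever is left on the active list and returns every undelivered buffer to vb2 with an error status. A sketch of the drain loop the last three matches come from (the cx8800_stop_vbi_dma() call and the dev->slock spinlock are assumptions recalled from the mainline driver; register teardown trimmed):

static void stop_streaming(struct vb2_queue *q)
{
	struct cx8800_dev *dev = q->drv_priv;
	struct cx88_dmaqueue *dmaq = &dev->vbiq;
	unsigned long flags;

	cx8800_stop_vbi_dma(dev);

	spin_lock_irqsave(&dev->slock, flags);
	while (!list_empty(&dmaq->active)) {
		struct cx88_buffer *buf = list_entry(dmaq->active.next,
						     struct cx88_buffer, list);

		/* Hand each undelivered buffer back to vb2 as errored. */
		list_del(&buf->list);
		vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_ERROR);
	}
	spin_unlock_irqrestore(&dev->slock, flags);
}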