Lines matching refs:skel (references to the per-device "skel" pointer in the V4L2 PCI skeleton capture driver; each entry lists the source line number, the matching line, and the enclosing function)

134 	struct skeleton *skel = dev_id;  in skeleton_irq()  local
141 spin_lock(&skel->qlock); in skeleton_irq()
143 spin_unlock(&skel->qlock); in skeleton_irq()
145 new_buf->vb.v4l2_buf.sequence = skel->sequence++; in skeleton_irq()
146 new_buf->vb.v4l2_buf.field = skel->field; in skeleton_irq()
147 if (skel->format.field == V4L2_FIELD_ALTERNATE) { in skeleton_irq()
148 if (skel->field == V4L2_FIELD_BOTTOM) in skeleton_irq()
149 skel->field = V4L2_FIELD_TOP; in skeleton_irq()
150 else if (skel->field == V4L2_FIELD_TOP) in skeleton_irq()
151 skel->field = V4L2_FIELD_BOTTOM; in skeleton_irq()
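The skeleton_irq() fragments above are the capture interrupt path: the oldest queued buffer is taken off the driver's list under qlock, stamped with a sequence number and field, and handed back to vb2. A minimal sketch of such a handler, using the pre-4.4 vb2 API implied by vb.v4l2_buf; the struct skel_buffer wrapper, the list handling and the "frame completed" assumption are not part of the match list, and real hardware would first check and acknowledge its own interrupt status:

static irqreturn_t skeleton_irq(int irq, void *dev_id)
{
	struct skeleton *skel = dev_id;
	struct skel_buffer *new_buf;	/* assumed wrapper: struct vb2_buffer vb; struct list_head list; */

	/* TODO: check and acknowledge the interrupt source in hardware */

	/* Take the buffer the DMA engine just filled off the driver's list */
	spin_lock(&skel->qlock);
	new_buf = list_first_entry_or_null(&skel->buf_list,
					   struct skel_buffer, list);
	if (new_buf)
		list_del(&new_buf->list);
	spin_unlock(&skel->qlock);

	if (new_buf) {
		v4l2_get_timestamp(&new_buf->vb.v4l2_buf.timestamp);
		new_buf->vb.v4l2_buf.sequence = skel->sequence++;
		new_buf->vb.v4l2_buf.field = skel->field;
		/* With FIELD_ALTERNATE the field toggles on every completed buffer */
		if (skel->format.field == V4L2_FIELD_ALTERNATE) {
			if (skel->field == V4L2_FIELD_BOTTOM)
				skel->field = V4L2_FIELD_TOP;
			else if (skel->field == V4L2_FIELD_TOP)
				skel->field = V4L2_FIELD_BOTTOM;
		}
		vb2_buffer_done(&new_buf->vb, VB2_BUF_STATE_DONE);
	}
	return IRQ_HANDLED;
}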
171 struct skeleton *skel = vb2_get_drv_priv(vq); in queue_setup() local
173 skel->field = skel->format.field; in queue_setup()
174 if (skel->field == V4L2_FIELD_ALTERNATE) { in queue_setup()
181 skel->field = V4L2_FIELD_TOP; in queue_setup()
187 if (fmt && fmt->fmt.pix.sizeimage < skel->format.sizeimage) in queue_setup()
190 sizes[0] = fmt ? fmt->fmt.pix.sizeimage : skel->format.sizeimage; in queue_setup()
191 alloc_ctxs[0] = skel->alloc_ctx; in queue_setup()
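The queue_setup() fragments use the pre-4.4 vb2 callback signature (the const struct v4l2_format argument and the alloc_ctxs[] array give that away). A minimal sketch of the whole callback under that assumption; the vb2_fileio_is_active() check in the FIELD_ALTERNATE branch is not visible in the match list and is an assumption:

static int queue_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
		       unsigned int *nbuffers, unsigned int *nplanes,
		       unsigned int sizes[], void *alloc_ctxs[])
{
	struct skeleton *skel = vb2_get_drv_priv(vq);

	skel->field = skel->format.field;
	if (skel->field == V4L2_FIELD_ALTERNATE) {
		/*
		 * read() cannot be used with FIELD_ALTERNATE since the
		 * TOP/BOTTOM information cannot be returned to userspace,
		 * so refuse file I/O mode here.
		 */
		if (vb2_fileio_is_active(vq))
			return -EINVAL;
		/* Capture starts with the top field */
		skel->field = V4L2_FIELD_TOP;
	}

	/* A VIDIOC_CREATE_BUFS format must be at least as large as the current one */
	if (fmt && fmt->fmt.pix.sizeimage < skel->format.sizeimage)
		return -EINVAL;

	*nplanes = 1;
	sizes[0] = fmt ? fmt->fmt.pix.sizeimage : skel->format.sizeimage;
	alloc_ctxs[0] = skel->alloc_ctx;	/* dma-contig context created in probe */
	return 0;
}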
201 struct skeleton *skel = vb2_get_drv_priv(vb->vb2_queue); in buffer_prepare() local
202 unsigned long size = skel->format.sizeimage; in buffer_prepare()
205 dev_err(&skel->pdev->dev, "buffer too small (%lu < %lu)\n", in buffer_prepare()
219 struct skeleton *skel = vb2_get_drv_priv(vb->vb2_queue); in buffer_queue() local
223 spin_lock_irqsave(&skel->qlock, flags); in buffer_queue()
224 list_add_tail(&buf->list, &skel->buf_list); in buffer_queue()
228 spin_unlock_irqrestore(&skel->qlock, flags); in buffer_queue()
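buffer_prepare() validates that each buffer is large enough for the current format, and buffer_queue() hands a buffer to the driver by putting it on buf_list under qlock. A minimal sketch of both callbacks, again assuming a struct skel_buffer wrapper obtained with container_of():

static int buffer_prepare(struct vb2_buffer *vb)
{
	struct skeleton *skel = vb2_get_drv_priv(vb->vb2_queue);
	unsigned long size = skel->format.sizeimage;

	if (vb2_plane_size(vb, 0) < size) {
		dev_err(&skel->pdev->dev, "buffer too small (%lu < %lu)\n",
			vb2_plane_size(vb, 0), size);
		return -EINVAL;
	}

	vb2_set_plane_payload(vb, 0, size);
	return 0;
}

static void buffer_queue(struct vb2_buffer *vb)
{
	struct skeleton *skel = vb2_get_drv_priv(vb->vb2_queue);
	struct skel_buffer *buf = container_of(vb, struct skel_buffer, vb);
	unsigned long flags;

	spin_lock_irqsave(&skel->qlock, flags);
	list_add_tail(&buf->list, &skel->buf_list);
	/* TODO: update hardware DMA descriptors here if required */
	spin_unlock_irqrestore(&skel->qlock, flags);
}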
231 static void return_all_buffers(struct skeleton *skel, in return_all_buffers() argument
237 spin_lock_irqsave(&skel->qlock, flags); in return_all_buffers()
238 list_for_each_entry_safe(buf, node, &skel->buf_list, list) { in return_all_buffers()
242 spin_unlock_irqrestore(&skel->qlock, flags); in return_all_buffers()
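return_all_buffers() is the helper that flushes the driver-owned buffer list back to vb2, either as QUEUED (when start_streaming() fails) or as ERROR (when streaming stops). Reassembled from the fragments above; only the struct skel_buffer name is an assumption:

static void return_all_buffers(struct skeleton *skel,
			       enum vb2_buffer_state state)
{
	struct skel_buffer *buf, *node;
	unsigned long flags;

	spin_lock_irqsave(&skel->qlock, flags);
	list_for_each_entry_safe(buf, node, &skel->buf_list, list) {
		vb2_buffer_done(&buf->vb, state);
		list_del(&buf->list);
	}
	spin_unlock_irqrestore(&skel->qlock, flags);
}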
253 struct skeleton *skel = vb2_get_drv_priv(vq); in start_streaming() local
256 skel->sequence = 0; in start_streaming()
265 return_all_buffers(skel, VB2_BUF_STATE_QUEUED); in start_streaming()
276 struct skeleton *skel = vb2_get_drv_priv(vq); in stop_streaming() local
281 return_all_buffers(skel, VB2_BUF_STATE_ERROR); in stop_streaming()
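start_streaming() resets the frame sequence counter and starts the DMA engine; stop_streaming() stops it and releases every buffer the driver still owns. A minimal sketch with the hardware interaction left as TODOs, matching the buffer-state convention visible above:

static int start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct skeleton *skel = vb2_get_drv_priv(vq);
	int ret = 0;

	skel->sequence = 0;

	/* TODO: start the DMA engine; set ret on failure */

	if (ret)
		/* On failure, queued buffers must be given back as QUEUED */
		return_all_buffers(skel, VB2_BUF_STATE_QUEUED);
	return ret;
}

static void stop_streaming(struct vb2_queue *vq)
{
	struct skeleton *skel = vb2_get_drv_priv(vq);

	/* TODO: stop the DMA engine and wait for it to finish */

	/* Buffers still owned by the driver are returned as ERROR */
	return_all_buffers(skel, VB2_BUF_STATE_ERROR);
}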
306 struct skeleton *skel = video_drvdata(file); in skeleton_querycap() local
311 pci_name(skel->pdev)); in skeleton_querycap()
324 static void skeleton_fill_pix_format(struct skeleton *skel, in skeleton_fill_pix_format() argument
328 if (skel->input == 0) { in skeleton_fill_pix_format()
331 pix->height = (skel->std & V4L2_STD_525_60) ? 480 : 576; in skeleton_fill_pix_format()
336 pix->width = skel->timings.bt.width; in skeleton_fill_pix_format()
337 pix->height = skel->timings.bt.height; in skeleton_fill_pix_format()
338 if (skel->timings.bt.interlaced) { in skeleton_fill_pix_format()
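skeleton_fill_pix_format() derives the pixel format from the selected input: on input 0 the geometry follows the current TV standard, on the other input it follows the configured DV timings, with interlaced timings mapped to FIELD_ALTERNATE at half the frame height. A minimal sketch; the fixed YUYV pixelformat, the 720-pixel analog width, the colorspaces and the bytesperline math are assumptions not shown in the match list:

static void skeleton_fill_pix_format(struct skeleton *skel,
				     struct v4l2_pix_format *pix)
{
	pix->pixelformat = V4L2_PIX_FMT_YUYV;	/* assumed fixed capture format */
	if (skel->input == 0) {
		/* Analog input: geometry follows the current TV standard */
		pix->width = 720;
		pix->height = (skel->std & V4L2_STD_525_60) ? 480 : 576;
		pix->field = V4L2_FIELD_INTERLACED;
		pix->colorspace = V4L2_COLORSPACE_SMPTE170M;
	} else {
		/* Digital input: geometry follows the configured DV timings */
		pix->width = skel->timings.bt.width;
		pix->height = skel->timings.bt.height;
		if (skel->timings.bt.interlaced) {
			/* Alternating fields, each half the frame height */
			pix->field = V4L2_FIELD_ALTERNATE;
			pix->height /= 2;
		} else {
			pix->field = V4L2_FIELD_NONE;
		}
		pix->colorspace = V4L2_COLORSPACE_REC709;
	}
	/* YUYV uses two bytes per pixel */
	pix->bytesperline = pix->width * 2;
	pix->sizeimage = pix->bytesperline * pix->height;
	pix->priv = 0;
}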
359 struct skeleton *skel = video_drvdata(file); in skeleton_try_fmt_vid_cap() local
370 skeleton_fill_pix_format(skel, pix); in skeleton_try_fmt_vid_cap()
377 struct skeleton *skel = video_drvdata(file); in skeleton_s_fmt_vid_cap() local
388 if (vb2_is_busy(&skel->queue)) in skeleton_s_fmt_vid_cap()
392 skel->format = f->fmt.pix; in skeleton_s_fmt_vid_cap()
399 struct skeleton *skel = video_drvdata(file); in skeleton_g_fmt_vid_cap() local
401 f->fmt.pix = skel->format; in skeleton_g_fmt_vid_cap()
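The format trio follows the usual V4L2 pattern: TRY_FMT normalizes whatever userspace asks for, S_FMT applies it only while the vb2 queue is idle, and G_FMT returns the stored format. A minimal sketch under those assumptions (the real TRY_FMT handler may clamp more fields than shown here):

static int skeleton_try_fmt_vid_cap(struct file *file, void *priv,
				    struct v4l2_format *f)
{
	struct skeleton *skel = video_drvdata(file);
	struct v4l2_pix_format *pix = &f->fmt.pix;

	/* Only one pixelformat is supported in this sketch */
	pix->pixelformat = V4L2_PIX_FMT_YUYV;
	skeleton_fill_pix_format(skel, pix);
	return 0;
}

static int skeleton_s_fmt_vid_cap(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	struct skeleton *skel = video_drvdata(file);
	int ret = skeleton_try_fmt_vid_cap(file, priv, f);

	if (ret)
		return ret;
	/* The format cannot change while buffers are allocated */
	if (vb2_is_busy(&skel->queue))
		return -EBUSY;
	/* TODO: program the new format into the hardware */
	skel->format = f->fmt.pix;
	return 0;
}

static int skeleton_g_fmt_vid_cap(struct file *file, void *priv,
				  struct v4l2_format *f)
{
	struct skeleton *skel = video_drvdata(file);

	f->fmt.pix = skel->format;
	return 0;
}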
417 struct skeleton *skel = video_drvdata(file); in skeleton_s_std() local
420 if (skel->input) in skeleton_s_std()
428 if (std == skel->std) in skeleton_s_std()
435 if (vb2_is_busy(&skel->queue)) in skeleton_s_std()
440 skel->std = std; in skeleton_s_std()
443 skeleton_fill_pix_format(skel, &skel->format); in skeleton_s_std()
449 struct skeleton *skel = video_drvdata(file); in skeleton_g_std() local
452 if (skel->input) in skeleton_g_std()
455 *std = skel->std; in skeleton_g_std()
468 struct skeleton *skel = video_drvdata(file); in skeleton_querystd() local
471 if (skel->input) in skeleton_querystd()
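The TV-standard ioctls are only valid on input 0; on the other input they return -ENODATA, which is the pattern behind the "if (skel->input)" checks above. A minimal sketch of S_STD and G_STD (QUERYSTD would additionally ask the hardware which standard it detects):

static int skeleton_s_std(struct file *file, void *priv, v4l2_std_id std)
{
	struct skeleton *skel = video_drvdata(file);

	/* TV standards only apply to the analog input (input 0) */
	if (skel->input)
		return -ENODATA;
	/* Nothing to do if the standard is unchanged */
	if (std == skel->std)
		return 0;
	/* A standard change implies a format change: refuse while buffers exist */
	if (vb2_is_busy(&skel->queue))
		return -EBUSY;
	/* TODO: program the new standard into the hardware */
	skel->std = std;
	skeleton_fill_pix_format(skel, &skel->format);
	return 0;
}

static int skeleton_g_std(struct file *file, void *priv, v4l2_std_id *std)
{
	struct skeleton *skel = video_drvdata(file);

	if (skel->input)
		return -ENODATA;
	*std = skel->std;
	return 0;
}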
496 struct skeleton *skel = video_drvdata(file); in skeleton_s_dv_timings() local
499 if (skel->input == 0) in skeleton_s_dv_timings()
512 if (v4l2_match_dv_timings(timings, &skel->timings, 0)) in skeleton_s_dv_timings()
519 if (vb2_is_busy(&skel->queue)) in skeleton_s_dv_timings()
525 skel->timings = *timings; in skeleton_s_dv_timings()
528 skeleton_fill_pix_format(skel, &skel->format); in skeleton_s_dv_timings()
535 struct skeleton *skel = video_drvdata(file); in skeleton_g_dv_timings() local
538 if (skel->input == 0) in skeleton_g_dv_timings()
541 *timings = skel->timings; in skeleton_g_dv_timings()
548 struct skeleton *skel = video_drvdata(file); in skeleton_enum_dv_timings() local
551 if (skel->input == 0) in skeleton_enum_dv_timings()
570 struct skeleton *skel = video_drvdata(file); in skeleton_query_dv_timings() local
573 if (skel->input == 0) in skeleton_query_dv_timings()
590 v4l2_print_dv_timings(skel->v4l2_dev.name, "query_dv_timings:", in skeleton_query_dv_timings()
599 struct skeleton *skel = video_drvdata(file); in skeleton_dv_timings_cap() local
602 if (skel->input == 0) in skeleton_dv_timings_cap()
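The DV-timings ioctls mirror the TV-standard ones but for the digital input: they return -ENODATA on input 0, and a timings change is refused while the queue is busy because it implies a format change. A minimal sketch of S_DV_TIMINGS and G_DV_TIMINGS; validating the requested timings against the hardware's capabilities is left as a TODO because those checks are not part of the match list:

static int skeleton_s_dv_timings(struct file *file, void *_fh,
				 struct v4l2_dv_timings *timings)
{
	struct skeleton *skel = video_drvdata(file);

	/* DV timings only apply to the digital input */
	if (skel->input == 0)
		return -ENODATA;

	/* TODO: validate the timings against the hardware's capabilities */

	/* Nothing to do if the timings are unchanged */
	if (v4l2_match_dv_timings(timings, &skel->timings, 0))
		return 0;
	/* New timings imply a new format: refuse while buffers are allocated */
	if (vb2_is_busy(&skel->queue))
		return -EBUSY;

	/* TODO: program the new timings into the hardware */
	skel->timings = *timings;
	skeleton_fill_pix_format(skel, &skel->format);
	return 0;
}

static int skeleton_g_dv_timings(struct file *file, void *_fh,
				 struct v4l2_dv_timings *timings)
{
	struct skeleton *skel = video_drvdata(file);

	if (skel->input == 0)
		return -ENODATA;
	*timings = skel->timings;
	return 0;
}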
629 struct skeleton *skel = video_drvdata(file); in skeleton_s_input() local
638 if (vb2_is_busy(&skel->queue)) in skeleton_s_input()
641 skel->input = i; in skeleton_s_input()
647 skel->vdev.tvnorms = i ? 0 : SKEL_TVNORMS; in skeleton_s_input()
650 skeleton_fill_pix_format(skel, &skel->format); in skeleton_s_input()
656 struct skeleton *skel = video_drvdata(file); in skeleton_g_input() local
658 *i = skel->input; in skeleton_g_input()
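Switching inputs changes which set of ioctls applies (TV standards vs. DV timings) and therefore also the format, so it is refused while buffers are allocated. A minimal sketch assuming two inputs, 0 analog and 1 digital:

static int skeleton_s_input(struct file *file, void *priv, unsigned int i)
{
	struct skeleton *skel = video_drvdata(file);

	if (i > 1)	/* assumed: exactly two inputs */
		return -EINVAL;
	/* An input change implies a format change: refuse while buffers exist */
	if (vb2_is_busy(&skel->queue))
		return -EBUSY;

	skel->input = i;
	/* Only the analog input advertises TV standards */
	skel->vdev.tvnorms = i ? 0 : SKEL_TVNORMS;
	skeleton_fill_pix_format(skel, &skel->format);
	return 0;
}

static int skeleton_g_input(struct file *file, void *priv, unsigned int *i)
{
	struct skeleton *skel = video_drvdata(file);

	*i = skel->input;
	return 0;
}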
764 struct skeleton *skel; in skeleton_probe() local
781 skel = devm_kzalloc(&pdev->dev, sizeof(struct skeleton), GFP_KERNEL); in skeleton_probe()
782 if (!skel) in skeleton_probe()
787 skeleton_irq, 0, KBUILD_MODNAME, skel); in skeleton_probe()
792 skel->pdev = pdev; in skeleton_probe()
795 skel->timings = timings_def; in skeleton_probe()
796 skel->std = V4L2_STD_625_50; in skeleton_probe()
797 skeleton_fill_pix_format(skel, &skel->format); in skeleton_probe()
800 ret = v4l2_device_register(&pdev->dev, &skel->v4l2_dev); in skeleton_probe()
804 mutex_init(&skel->lock); in skeleton_probe()
807 hdl = &skel->ctrl_handler; in skeleton_probe()
821 skel->v4l2_dev.ctrl_handler = hdl; in skeleton_probe()
824 q = &skel->queue; in skeleton_probe()
827 q->drv_priv = skel; in skeleton_probe()
847 q->lock = &skel->lock; in skeleton_probe()
857 skel->alloc_ctx = vb2_dma_contig_init_ctx(&pdev->dev); in skeleton_probe()
858 if (IS_ERR(skel->alloc_ctx)) { in skeleton_probe()
860 ret = PTR_ERR(skel->alloc_ctx); in skeleton_probe()
863 INIT_LIST_HEAD(&skel->buf_list); in skeleton_probe()
864 spin_lock_init(&skel->qlock); in skeleton_probe()
867 vdev = &skel->vdev; in skeleton_probe()
881 vdev->lock = &skel->lock; in skeleton_probe()
883 vdev->v4l2_dev = &skel->v4l2_dev; in skeleton_probe()
886 video_set_drvdata(vdev, skel); in skeleton_probe()
896 vb2_dma_contig_cleanup_ctx(skel->alloc_ctx); in skeleton_probe()
898 v4l2_ctrl_handler_free(&skel->ctrl_handler); in skeleton_probe()
899 v4l2_device_unregister(&skel->v4l2_dev); in skeleton_probe()
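The probe fragments show the order of initialization: allocate the per-device struct, request the interrupt, register the v4l2_device, set up the control handler, initialize the vb2 queue and the dma-contig context, then register the video_device. A sketch of the queue and video_device wiring only, not the complete probe; skel_qops, skel_fops, skel_ioctl_ops, struct skel_buffer and the error labels are assumed names, and the error unwind follows the cleanup calls visible at the end of the probe matches:

	/* inside skeleton_probe(), after v4l2_device and control-handler setup */
	q = &skel->queue;
	q->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	q->io_modes = VB2_MMAP | VB2_DMABUF | VB2_READ;
	q->drv_priv = skel;
	q->buf_struct_size = sizeof(struct skel_buffer);
	q->ops = &skel_qops;			/* the vb2_ops sketched above */
	q->mem_ops = &vb2_dma_contig_memops;
	q->lock = &skel->lock;			/* serialize queue ioctls with the rest */
	ret = vb2_queue_init(q);
	if (ret)
		goto free_hdl;

	skel->alloc_ctx = vb2_dma_contig_init_ctx(&pdev->dev);
	if (IS_ERR(skel->alloc_ctx)) {
		ret = PTR_ERR(skel->alloc_ctx);
		goto free_hdl;
	}
	INIT_LIST_HEAD(&skel->buf_list);
	spin_lock_init(&skel->qlock);

	vdev = &skel->vdev;
	strlcpy(vdev->name, KBUILD_MODNAME, sizeof(vdev->name));
	vdev->release = video_device_release_empty;
	vdev->fops = &skel_fops;		/* v4l2_file_operations using the vb2 helpers */
	vdev->ioctl_ops = &skel_ioctl_ops;
	vdev->lock = &skel->lock;
	vdev->queue = q;
	vdev->v4l2_dev = &skel->v4l2_dev;
	video_set_drvdata(vdev, skel);
	ret = video_register_device(vdev, VFL_TYPE_GRABBER, -1);
	if (ret)
		goto free_ctx;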
908 struct skeleton *skel = container_of(v4l2_dev, struct skeleton, v4l2_dev); in skeleton_remove() local
910 video_unregister_device(&skel->vdev); in skeleton_remove()
911 v4l2_ctrl_handler_free(&skel->ctrl_handler); in skeleton_remove()
912 vb2_dma_contig_cleanup_ctx(skel->alloc_ctx); in skeleton_remove()
913 v4l2_device_unregister(&skel->v4l2_dev); in skeleton_remove()
914 pci_disable_device(skel->pdev); in skeleton_remove()
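skeleton_remove() undoes probe in reverse order: unregister the video node first so userspace can no longer open it, then free the control handler, the dma-contig context and the v4l2_device, and finally disable the PCI device. Reassembled from the fragments above into a complete function; the pci_get_drvdata() line is an assumption based on v4l2_device_register() storing the v4l2_device pointer as driver data:

static void skeleton_remove(struct pci_dev *pdev)
{
	struct v4l2_device *v4l2_dev = pci_get_drvdata(pdev);
	struct skeleton *skel = container_of(v4l2_dev, struct skeleton, v4l2_dev);

	video_unregister_device(&skel->vdev);
	v4l2_ctrl_handler_free(&skel->ctrl_handler);
	vb2_dma_contig_cleanup_ctx(skel->alloc_ctx);
	v4l2_device_unregister(&skel->v4l2_dev);
	pci_disable_device(skel->pdev);
}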