root/include/media/v4l2-mem2mem.h

DEFINITIONS

This source file includes the following definitions.
  1. v4l2_m2m_buf_done
  2. v4l2_m2m_unregister_media_controller
  3. v4l2_m2m_register_media_controller
  4. v4l2_m2m_set_src_buffered
  5. v4l2_m2m_set_dst_buffered
  6. v4l2_m2m_num_src_bufs_ready
  7. v4l2_m2m_num_dst_bufs_ready
  8. v4l2_m2m_next_src_buf
  9. v4l2_m2m_next_dst_buf
  10. v4l2_m2m_last_src_buf
  11. v4l2_m2m_last_dst_buf
  12. v4l2_m2m_get_src_vq
  13. v4l2_m2m_get_dst_vq
  14. v4l2_m2m_src_buf_remove
  15. v4l2_m2m_dst_buf_remove
  16. v4l2_m2m_src_buf_remove_by_buf
  17. v4l2_m2m_dst_buf_remove_by_buf
  18. v4l2_m2m_src_buf_remove_by_idx
  19. v4l2_m2m_dst_buf_remove_by_idx

   1 /* SPDX-License-Identifier: GPL-2.0-or-later */
   2 /*
   3  * Memory-to-memory device framework for Video for Linux 2.
   4  *
   5  * Helper functions for devices that use memory buffers for both source
   6  * and destination.
   7  *
   8  * Copyright (c) 2009 Samsung Electronics Co., Ltd.
   9  * Pawel Osciak, <pawel@osciak.com>
  10  * Marek Szyprowski, <m.szyprowski@samsung.com>
  11  */
  12 
  13 #ifndef _MEDIA_V4L2_MEM2MEM_H
  14 #define _MEDIA_V4L2_MEM2MEM_H
  15 
  16 #include <media/videobuf2-v4l2.h>
  17 
  18 /**
  19  * struct v4l2_m2m_ops - mem-to-mem device driver callbacks
  20  * @device_run: required. Begin the actual job (transaction) inside this
  21  *              callback.
  22  *              The job does NOT have to end before this callback returns
   23  *              (and usually it will not have). When the job finishes,
  24  *              v4l2_m2m_job_finish() has to be called.
  25  * @job_ready:  optional. Should return 0 if the driver does not have a job
  26  *              fully prepared to run yet (i.e. it will not be able to finish a
  27  *              transaction without sleeping). If not provided, it will be
  28  *              assumed that one source and one destination buffer are all
  29  *              that is required for the driver to perform one full transaction.
  30  *              This method may not sleep.
  31  * @job_abort:  optional. Informs the driver that it has to abort the currently
  32  *              running transaction as soon as possible (i.e. as soon as it can
  33  *              stop the device safely; e.g. in the next interrupt handler),
  34  *              even if the transaction would not have been finished by then.
  35  *              After the driver performs the necessary steps, it has to call
  36  *              v4l2_m2m_job_finish() (as if the transaction ended normally).
  37  *              This function does not have to (and will usually not) wait
  38  *              until the device enters a state when it can be stopped.
  39  */
  40 struct v4l2_m2m_ops {
  41         void (*device_run)(void *priv);
  42         int (*job_ready)(void *priv);
  43         void (*job_abort)(void *priv);
  44 };
  45 
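/*
 * Illustrative sketch, not part of this header: a minimal v4l2_m2m_ops
 * implementation for a hypothetical driver, as it would appear in a driver
 * source file that includes this header and <linux/interrupt.h>. The
 * my_dev/my_ctx structures and the my_hw_start() helper are assumptions made
 * only for this example. device_run() merely kicks off the hardware and
 * returns; once the completion interrupt fires, the driver returns the
 * buffers and yields the device with v4l2_m2m_job_finish().
 */
struct my_dev {
	struct v4l2_m2m_dev *m2m_dev;
};

struct my_ctx {
	struct my_dev *dev;
	struct v4l2_m2m_ctx *fh_m2m_ctx;
};

static void my_device_run(void *priv)
{
	struct my_ctx *ctx = priv;
	struct vb2_v4l2_buffer *src, *dst;

	src = v4l2_m2m_next_src_buf(ctx->fh_m2m_ctx);
	dst = v4l2_m2m_next_dst_buf(ctx->fh_m2m_ctx);

	/* hypothetical helper that programs the hardware and enables its IRQ */
	my_hw_start(ctx->dev, src, dst);
}

static irqreturn_t my_irq_handler(int irq, void *data)
{
	struct my_dev *dev = data;
	struct my_ctx *ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
	struct vb2_v4l2_buffer *src, *dst;

	src = v4l2_m2m_src_buf_remove(ctx->fh_m2m_ctx);
	dst = v4l2_m2m_dst_buf_remove(ctx->fh_m2m_ctx);
	v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
	v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
	v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh_m2m_ctx);

	return IRQ_HANDLED;
}

static const struct v4l2_m2m_ops my_m2m_ops = {
	.device_run = my_device_run,
};
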
  46 struct video_device;
  47 struct v4l2_m2m_dev;
  48 
  49 /**
  50  * struct v4l2_m2m_queue_ctx - represents a queue for buffers ready to be
  51  *      processed
  52  *
  53  * @q:          pointer to struct &vb2_queue
   54  * @rdy_queue:  List of buffers ready to be processed
  55  * @rdy_spinlock: spin lock to protect the struct usage
  56  * @num_rdy:    number of buffers ready to be processed
  57  * @buffered:   is the queue buffered?
  58  *
  59  * Queue for buffers ready to be processed as soon as this
  60  * instance receives access to the device.
  61  */
  62 
  63 struct v4l2_m2m_queue_ctx {
  64         struct vb2_queue        q;
  65 
  66         struct list_head        rdy_queue;
  67         spinlock_t              rdy_spinlock;
  68         u8                      num_rdy;
  69         bool                    buffered;
  70 };
  71 
  72 /**
  73  * struct v4l2_m2m_ctx - Memory to memory context structure
  74  *
  75  * @q_lock: struct &mutex lock
  76  * @m2m_dev: opaque pointer to the internal data to handle M2M context
  77  * @cap_q_ctx: Capture (output to memory) queue context
  78  * @out_q_ctx: Output (input from memory) queue context
  79  * @queue: List of memory to memory contexts
  80  * @job_flags: Job queue flags, used internally by v4l2-mem2mem.c:
  81  *              %TRANS_QUEUED, %TRANS_RUNNING and %TRANS_ABORT.
   82  * @finished: Wait queue used to signal when a queued job has finished.
  83  * @priv: Instance private data
  84  *
  85  * The memory to memory context is specific to a file handle, NOT to e.g.
  86  * a device.
  87  */
  88 struct v4l2_m2m_ctx {
  89         /* optional cap/out vb2 queues lock */
  90         struct mutex                    *q_lock;
  91 
  92         /* internal use only */
  93         struct v4l2_m2m_dev             *m2m_dev;
  94 
  95         struct v4l2_m2m_queue_ctx       cap_q_ctx;
  96 
  97         struct v4l2_m2m_queue_ctx       out_q_ctx;
  98 
  99         /* For device job queue */
 100         struct list_head                queue;
 101         unsigned long                   job_flags;
 102         wait_queue_head_t               finished;
 103 
 104         void                            *priv;
 105 };
 106 
 107 /**
 108  * struct v4l2_m2m_buffer - Memory to memory buffer
 109  *
 110  * @vb: pointer to struct &vb2_v4l2_buffer
 111  * @list: list of m2m buffers
 112  */
 113 struct v4l2_m2m_buffer {
 114         struct vb2_v4l2_buffer  vb;
 115         struct list_head        list;
 116 };
 117 
 118 /**
 119  * v4l2_m2m_get_curr_priv() - return driver private data for the currently
 120  * running instance or NULL if no instance is running
 121  *
 122  * @m2m_dev: opaque pointer to the internal data to handle M2M context
 123  */
 124 void *v4l2_m2m_get_curr_priv(struct v4l2_m2m_dev *m2m_dev);
 125 
 126 /**
 127  * v4l2_m2m_get_vq() - return vb2_queue for the given type
 128  *
 129  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 130  * @type: type of the V4L2 buffer, as defined by enum &v4l2_buf_type
 131  */
 132 struct vb2_queue *v4l2_m2m_get_vq(struct v4l2_m2m_ctx *m2m_ctx,
 133                                        enum v4l2_buf_type type);
 134 
 135 /**
 136  * v4l2_m2m_try_schedule() - check whether an instance is ready to be added to
 137  * the pending job queue and add it if so.
 138  *
 139  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 140  *
 141  * There are three basic requirements an instance has to meet to be able to run:
 142  * 1) at least one source buffer has to be queued,
 143  * 2) at least one destination buffer has to be queued,
 144  * 3) streaming has to be on.
 145  *
 146  * If a queue is buffered (for example a decoder hardware ringbuffer that has
 147  * to be drained before doing streamoff), allow scheduling without v4l2 buffers
 148  * on that queue.
 149  *
  150  * There may also be additional, custom requirements. In such a case the
  151  * driver should supply a custom callback (job_ready in v4l2_m2m_ops) that
  152  * returns 1 if the instance is ready.
 153  * An example of the above could be an instance that requires more than one
 154  * src/dst buffer per transaction.
 155  */
 156 void v4l2_m2m_try_schedule(struct v4l2_m2m_ctx *m2m_ctx);
 157 
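/*
 * Illustrative sketch, not part of this header: a job_ready() callback for a
 * hypothetical driver that needs two queued source buffers per transaction.
 * my_ctx and its fh_m2m_ctx member are assumptions used only for this
 * example.
 */
static int my_job_ready(void *priv)
{
	struct my_ctx *ctx = priv;

	/* schedulable only with two source and one destination buffer ready */
	return v4l2_m2m_num_src_bufs_ready(ctx->fh_m2m_ctx) >= 2 &&
	       v4l2_m2m_num_dst_bufs_ready(ctx->fh_m2m_ctx) >= 1;
}
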
 158 /**
 159  * v4l2_m2m_job_finish() - inform the framework that a job has been finished
 160  * and have it clean up
 161  *
 162  * @m2m_dev: opaque pointer to the internal data to handle M2M context
 163  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 164  *
 165  * Called by a driver to yield back the device after it has finished with it.
 166  * Should be called as soon as possible after reaching a state which allows
 167  * other instances to take control of the device.
 168  *
 169  * This function has to be called only after &v4l2_m2m_ops->device_run
 170  * callback has been called on the driver. To prevent recursion, it should
 171  * not be called directly from the &v4l2_m2m_ops->device_run callback though.
 172  */
 173 void v4l2_m2m_job_finish(struct v4l2_m2m_dev *m2m_dev,
 174                          struct v4l2_m2m_ctx *m2m_ctx);
 175 
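/**
 * v4l2_m2m_buf_done() - return a finished buffer to videobuf2
 *
 * @buf: pointer to the struct &vb2_v4l2_buffer that has been processed
 * @state: final state of the buffer, as defined by enum &vb2_buffer_state
 *	(typically %VB2_BUF_STATE_DONE or %VB2_BUF_STATE_ERROR)
 */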
 176 static inline void
 177 v4l2_m2m_buf_done(struct vb2_v4l2_buffer *buf, enum vb2_buffer_state state)
 178 {
 179         vb2_buffer_done(&buf->vb2_buf, state);
 180 }
 181 
 182 /**
 183  * v4l2_m2m_reqbufs() - multi-queue-aware REQBUFS multiplexer
 184  *
 185  * @file: pointer to struct &file
 186  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 187  * @reqbufs: pointer to struct &v4l2_requestbuffers
 188  */
 189 int v4l2_m2m_reqbufs(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 190                      struct v4l2_requestbuffers *reqbufs);
 191 
 192 /**
 193  * v4l2_m2m_querybuf() - multi-queue-aware QUERYBUF multiplexer
 194  *
 195  * @file: pointer to struct &file
 196  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 197  * @buf: pointer to struct &v4l2_buffer
 198  *
 199  * See v4l2_m2m_mmap() documentation for details.
 200  */
 201 int v4l2_m2m_querybuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 202                       struct v4l2_buffer *buf);
 203 
 204 /**
 205  * v4l2_m2m_qbuf() - enqueue a source or destination buffer, depending on
 206  * the type
 207  *
 208  * @file: pointer to struct &file
 209  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 210  * @buf: pointer to struct &v4l2_buffer
 211  */
 212 int v4l2_m2m_qbuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 213                   struct v4l2_buffer *buf);
 214 
 215 /**
 216  * v4l2_m2m_dqbuf() - dequeue a source or destination buffer, depending on
 217  * the type
 218  *
 219  * @file: pointer to struct &file
 220  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 221  * @buf: pointer to struct &v4l2_buffer
 222  */
 223 int v4l2_m2m_dqbuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 224                    struct v4l2_buffer *buf);
 225 
 226 /**
 227  * v4l2_m2m_prepare_buf() - prepare a source or destination buffer, depending on
 228  * the type
 229  *
 230  * @file: pointer to struct &file
 231  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 232  * @buf: pointer to struct &v4l2_buffer
 233  */
 234 int v4l2_m2m_prepare_buf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 235                          struct v4l2_buffer *buf);
 236 
 237 /**
 238  * v4l2_m2m_create_bufs() - create a source or destination buffer, depending
 239  * on the type
 240  *
 241  * @file: pointer to struct &file
 242  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 243  * @create: pointer to struct &v4l2_create_buffers
 244  */
 245 int v4l2_m2m_create_bufs(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 246                          struct v4l2_create_buffers *create);
 247 
 248 /**
 249  * v4l2_m2m_expbuf() - export a source or destination buffer, depending on
 250  * the type
 251  *
 252  * @file: pointer to struct &file
 253  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 254  * @eb: pointer to struct &v4l2_exportbuffer
 255  */
 256 int v4l2_m2m_expbuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 257                    struct v4l2_exportbuffer *eb);
 258 
 259 /**
 260  * v4l2_m2m_streamon() - turn on streaming for a video queue
 261  *
 262  * @file: pointer to struct &file
 263  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 264  * @type: type of the V4L2 buffer, as defined by enum &v4l2_buf_type
 265  */
 266 int v4l2_m2m_streamon(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 267                       enum v4l2_buf_type type);
 268 
 269 /**
 270  * v4l2_m2m_streamoff() - turn off streaming for a video queue
 271  *
 272  * @file: pointer to struct &file
 273  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 274  * @type: type of the V4L2 buffer, as defined by enum &v4l2_buf_type
 275  */
 276 int v4l2_m2m_streamoff(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 277                        enum v4l2_buf_type type);
 278 
 279 /**
 280  * v4l2_m2m_poll() - poll replacement, for destination buffers only
 281  *
 282  * @file: pointer to struct &file
 283  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 284  * @wait: pointer to struct &poll_table_struct
 285  *
 286  * Call from the driver's poll() function. Will poll both queues. If a buffer
 287  * is available to dequeue (with dqbuf) from the source queue, this will
  288  * indicate that a non-blocking write can be performed, while a buffer
  289  * available on the destination queue indicates a non-blocking read.
 290  */
 291 __poll_t v4l2_m2m_poll(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 292                            struct poll_table_struct *wait);
 293 
 294 /**
 295  * v4l2_m2m_mmap() - source and destination queues-aware mmap multiplexer
 296  *
 297  * @file: pointer to struct &file
 298  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 299  * @vma: pointer to struct &vm_area_struct
 300  *
  301  * Call from the driver's mmap() function. Handles mmap() for both queues
  302  * seamlessly for the videobuf layer, which sees normal per-queue offsets
  303  * and proper videobuf queue pointers. The differentiation is made outside
  304  * videobuf by adding a predefined offset to buffers from one of the queues
  305  * and subtracting it again before passing them back to videobuf. Only
  306  * drivers (and thus applications) see the modified offsets.
 307  */
 308 int v4l2_m2m_mmap(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
 309                   struct vm_area_struct *vma);
 310 
 311 /**
 312  * v4l2_m2m_init() - initialize per-driver m2m data
 313  *
 314  * @m2m_ops: pointer to struct v4l2_m2m_ops
 315  *
 316  * Usually called from driver's ``probe()`` function.
 317  *
 318  * Return: returns an opaque pointer to the internal data to handle M2M context
 319  */
 320 struct v4l2_m2m_dev *v4l2_m2m_init(const struct v4l2_m2m_ops *m2m_ops);
 321 
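/*
 * Illustrative sketch, not part of this header: probe-time initialization for
 * a hypothetical driver (my_dev and my_m2m_ops are assumptions, and
 * <linux/err.h> provides IS_ERR()/PTR_ERR()). The matching v4l2_m2m_release()
 * call belongs in the driver's remove() path.
 */
static int my_init_m2m(struct my_dev *dev)
{
	dev->m2m_dev = v4l2_m2m_init(&my_m2m_ops);
	if (IS_ERR(dev->m2m_dev))
		return PTR_ERR(dev->m2m_dev);

	return 0;
}
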
 322 #if defined(CONFIG_MEDIA_CONTROLLER)
 323 void v4l2_m2m_unregister_media_controller(struct v4l2_m2m_dev *m2m_dev);
 324 int v4l2_m2m_register_media_controller(struct v4l2_m2m_dev *m2m_dev,
 325                         struct video_device *vdev, int function);
 326 #else
 327 static inline void
 328 v4l2_m2m_unregister_media_controller(struct v4l2_m2m_dev *m2m_dev)
 329 {
 330 }
 331 
 332 static inline int
 333 v4l2_m2m_register_media_controller(struct v4l2_m2m_dev *m2m_dev,
 334                 struct video_device *vdev, int function)
 335 {
 336         return 0;
 337 }
 338 #endif
 339 
 340 /**
 341  * v4l2_m2m_release() - cleans up and frees a m2m_dev structure
 342  *
 343  * @m2m_dev: opaque pointer to the internal data to handle M2M context
 344  *
 345  * Usually called from driver's ``remove()`` function.
 346  */
 347 void v4l2_m2m_release(struct v4l2_m2m_dev *m2m_dev);
 348 
 349 /**
 350  * v4l2_m2m_ctx_init() - allocate and initialize a m2m context
 351  *
 352  * @m2m_dev: opaque pointer to the internal data to handle M2M context
 353  * @drv_priv: driver's instance private data
 354  * @queue_init: a callback for queue type-specific initialization function
  355  *      to be used for initializing the vb2 queues
 356  *
 357  * Usually called from driver's ``open()`` function.
 358  */
 359 struct v4l2_m2m_ctx *v4l2_m2m_ctx_init(struct v4l2_m2m_dev *m2m_dev,
 360                 void *drv_priv,
 361                 int (*queue_init)(void *priv, struct vb2_queue *src_vq, struct vb2_queue *dst_vq));
 362 
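/*
 * Illustrative sketch, not part of this header: a queue_init() callback as it
 * could be handed to v4l2_m2m_ctx_init() from a hypothetical open() handler.
 * my_ctx and my_queue_ops are assumptions, and <media/videobuf2-dma-contig.h>
 * is assumed for vb2_dma_contig_memops. buf_struct_size must be at least
 * sizeof(struct v4l2_m2m_buffer) so the framework can link buffers into its
 * ready lists.
 */
static int my_queue_init(void *priv, struct vb2_queue *src_vq,
			 struct vb2_queue *dst_vq)
{
	struct my_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->ops = &my_queue_ops;		/* hypothetical vb2_ops */
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->ops = &my_queue_ops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;

	return vb2_queue_init(dst_vq);
}

/*
 * From open(), the m2m context would then be created with
 * v4l2_m2m_ctx_init(dev->m2m_dev, ctx, my_queue_init), checking the result
 * with IS_ERR() since an ERR_PTR() is returned on failure.
 */
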
 363 static inline void v4l2_m2m_set_src_buffered(struct v4l2_m2m_ctx *m2m_ctx,
 364                                              bool buffered)
 365 {
 366         m2m_ctx->out_q_ctx.buffered = buffered;
 367 }
 368 
 369 static inline void v4l2_m2m_set_dst_buffered(struct v4l2_m2m_ctx *m2m_ctx,
 370                                              bool buffered)
 371 {
 372         m2m_ctx->cap_q_ctx.buffered = buffered;
 373 }
 374 
 375 /**
 376  * v4l2_m2m_ctx_release() - release m2m context
 377  *
 378  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 379  *
 380  * Usually called from driver's release() function.
 381  */
 382 void v4l2_m2m_ctx_release(struct v4l2_m2m_ctx *m2m_ctx);
 383 
 384 /**
 385  * v4l2_m2m_buf_queue() - add a buffer to the proper ready buffers list.
 386  *
 387  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 388  * @vbuf: pointer to struct &vb2_v4l2_buffer
 389  *
  390  * Call from the driver's &vb2_ops->buf_queue callback.
 391  */
 392 void v4l2_m2m_buf_queue(struct v4l2_m2m_ctx *m2m_ctx,
 393                         struct vb2_v4l2_buffer *vbuf);
 394 
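/*
 * Illustrative sketch, not part of this header: the vb2 .buf_queue callback
 * of a hypothetical driver simply hands queued buffers over to the framework.
 * my_ctx is an assumption used only for this example.
 */
static void my_buf_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct my_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);

	v4l2_m2m_buf_queue(ctx->fh_m2m_ctx, vbuf);
}
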
 395 /**
 396  * v4l2_m2m_num_src_bufs_ready() - return the number of source buffers ready for
 397  * use
 398  *
 399  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 400  */
 401 static inline
 402 unsigned int v4l2_m2m_num_src_bufs_ready(struct v4l2_m2m_ctx *m2m_ctx)
 403 {
 404         return m2m_ctx->out_q_ctx.num_rdy;
 405 }
 406 
 407 /**
 408  * v4l2_m2m_num_dst_bufs_ready() - return the number of destination buffers
 409  * ready for use
 410  *
 411  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 412  */
 413 static inline
 414 unsigned int v4l2_m2m_num_dst_bufs_ready(struct v4l2_m2m_ctx *m2m_ctx)
 415 {
 416         return m2m_ctx->cap_q_ctx.num_rdy;
 417 }
 418 
 419 /**
 420  * v4l2_m2m_next_buf() - return next buffer from the list of ready buffers
 421  *
  422  * @q_ctx: pointer to struct &v4l2_m2m_queue_ctx
 423  */
 424 struct vb2_v4l2_buffer *v4l2_m2m_next_buf(struct v4l2_m2m_queue_ctx *q_ctx);
 425 
 426 /**
 427  * v4l2_m2m_next_src_buf() - return next source buffer from the list of ready
 428  * buffers
 429  *
 430  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 431  */
 432 static inline struct vb2_v4l2_buffer *
 433 v4l2_m2m_next_src_buf(struct v4l2_m2m_ctx *m2m_ctx)
 434 {
 435         return v4l2_m2m_next_buf(&m2m_ctx->out_q_ctx);
 436 }
 437 
 438 /**
 439  * v4l2_m2m_next_dst_buf() - return next destination buffer from the list of
 440  * ready buffers
 441  *
 442  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 443  */
 444 static inline struct vb2_v4l2_buffer *
 445 v4l2_m2m_next_dst_buf(struct v4l2_m2m_ctx *m2m_ctx)
 446 {
 447         return v4l2_m2m_next_buf(&m2m_ctx->cap_q_ctx);
 448 }
 449 
 450 /**
 451  * v4l2_m2m_last_buf() - return last buffer from the list of ready buffers
 452  *
  453  * @q_ctx: pointer to struct &v4l2_m2m_queue_ctx
 454  */
 455 struct vb2_v4l2_buffer *v4l2_m2m_last_buf(struct v4l2_m2m_queue_ctx *q_ctx);
 456 
 457 /**
  458  * v4l2_m2m_last_src_buf() - return last source buffer from the list of
 459  * ready buffers
 460  *
 461  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 462  */
 463 static inline struct vb2_v4l2_buffer *
 464 v4l2_m2m_last_src_buf(struct v4l2_m2m_ctx *m2m_ctx)
 465 {
 466         return v4l2_m2m_last_buf(&m2m_ctx->out_q_ctx);
 467 }
 468 
 469 /**
 470  * v4l2_m2m_last_dst_buf() - return last destination buffer from the list of
 471  * ready buffers
 472  *
 473  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 474  */
 475 static inline struct vb2_v4l2_buffer *
 476 v4l2_m2m_last_dst_buf(struct v4l2_m2m_ctx *m2m_ctx)
 477 {
 478         return v4l2_m2m_last_buf(&m2m_ctx->cap_q_ctx);
 479 }
 480 
 481 /**
 482  * v4l2_m2m_for_each_dst_buf() - iterate over a list of destination ready
 483  * buffers
 484  *
 485  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 486  * @b: current buffer of type struct v4l2_m2m_buffer
 487  */
 488 #define v4l2_m2m_for_each_dst_buf(m2m_ctx, b)   \
 489         list_for_each_entry(b, &m2m_ctx->cap_q_ctx.rdy_queue, list)
 490 
 491 /**
 492  * v4l2_m2m_for_each_src_buf() - iterate over a list of source ready buffers
 493  *
 494  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 495  * @b: current buffer of type struct v4l2_m2m_buffer
 496  */
 497 #define v4l2_m2m_for_each_src_buf(m2m_ctx, b)   \
 498         list_for_each_entry(b, &m2m_ctx->out_q_ctx.rdy_queue, list)
 499 
 500 /**
 501  * v4l2_m2m_for_each_dst_buf_safe() - iterate over a list of destination ready
 502  * buffers safely
 503  *
 504  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 505  * @b: current buffer of type struct v4l2_m2m_buffer
 506  * @n: used as temporary storage
 507  */
 508 #define v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, b, n)   \
 509         list_for_each_entry_safe(b, n, &m2m_ctx->cap_q_ctx.rdy_queue, list)
 510 
 511 /**
 512  * v4l2_m2m_for_each_src_buf_safe() - iterate over a list of source ready
 513  * buffers safely
 514  *
 515  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 516  * @b: current buffer of type struct v4l2_m2m_buffer
 517  * @n: used as temporary storage
 518  */
 519 #define v4l2_m2m_for_each_src_buf_safe(m2m_ctx, b, n)   \
 520         list_for_each_entry_safe(b, n, &m2m_ctx->out_q_ctx.rdy_queue, list)
 521 
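/*
 * Illustrative sketch, not part of this header: walking the list of ready
 * capture buffers under the queue's rdy_spinlock. my_ctx is an assumption
 * used only for this example.
 */
static void my_dump_ready_dst_bufs(struct my_ctx *ctx)
{
	struct v4l2_m2m_buffer *buf;
	unsigned long flags;

	spin_lock_irqsave(&ctx->fh_m2m_ctx->cap_q_ctx.rdy_spinlock, flags);
	v4l2_m2m_for_each_dst_buf(ctx->fh_m2m_ctx, buf)
		pr_debug("ready dst buffer %u\n", buf->vb.vb2_buf.index);
	spin_unlock_irqrestore(&ctx->fh_m2m_ctx->cap_q_ctx.rdy_spinlock, flags);
}
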
 522 /**
 523  * v4l2_m2m_get_src_vq() - return vb2_queue for source buffers
 524  *
 525  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 526  */
 527 static inline
 528 struct vb2_queue *v4l2_m2m_get_src_vq(struct v4l2_m2m_ctx *m2m_ctx)
 529 {
 530         return &m2m_ctx->out_q_ctx.q;
 531 }
 532 
 533 /**
 534  * v4l2_m2m_get_dst_vq() - return vb2_queue for destination buffers
 535  *
 536  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 537  */
 538 static inline
 539 struct vb2_queue *v4l2_m2m_get_dst_vq(struct v4l2_m2m_ctx *m2m_ctx)
 540 {
 541         return &m2m_ctx->cap_q_ctx.q;
 542 }
 543 
 544 /**
 545  * v4l2_m2m_buf_remove() - take off a buffer from the list of ready buffers and
 546  * return it
 547  *
  548  * @q_ctx: pointer to struct &v4l2_m2m_queue_ctx
 549  */
 550 struct vb2_v4l2_buffer *v4l2_m2m_buf_remove(struct v4l2_m2m_queue_ctx *q_ctx);
 551 
 552 /**
 553  * v4l2_m2m_src_buf_remove() - take off a source buffer from the list of ready
 554  * buffers and return it
 555  *
 556  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 557  */
 558 static inline struct vb2_v4l2_buffer *
 559 v4l2_m2m_src_buf_remove(struct v4l2_m2m_ctx *m2m_ctx)
 560 {
 561         return v4l2_m2m_buf_remove(&m2m_ctx->out_q_ctx);
 562 }
 563 
 564 /**
 565  * v4l2_m2m_dst_buf_remove() - take off a destination buffer from the list of
 566  * ready buffers and return it
 567  *
 568  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 569  */
 570 static inline struct vb2_v4l2_buffer *
 571 v4l2_m2m_dst_buf_remove(struct v4l2_m2m_ctx *m2m_ctx)
 572 {
 573         return v4l2_m2m_buf_remove(&m2m_ctx->cap_q_ctx);
 574 }
 575 
 576 /**
 577  * v4l2_m2m_buf_remove_by_buf() - take off exact buffer from the list of ready
 578  * buffers
 579  *
  580  * @q_ctx: pointer to struct &v4l2_m2m_queue_ctx
 581  * @vbuf: the buffer to be removed
 582  */
 583 void v4l2_m2m_buf_remove_by_buf(struct v4l2_m2m_queue_ctx *q_ctx,
 584                                 struct vb2_v4l2_buffer *vbuf);
 585 
 586 /**
 587  * v4l2_m2m_src_buf_remove_by_buf() - take off exact source buffer from the list
 588  * of ready buffers
 589  *
 590  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 591  * @vbuf: the buffer to be removed
 592  */
 593 static inline void v4l2_m2m_src_buf_remove_by_buf(struct v4l2_m2m_ctx *m2m_ctx,
 594                                                   struct vb2_v4l2_buffer *vbuf)
 595 {
 596         v4l2_m2m_buf_remove_by_buf(&m2m_ctx->out_q_ctx, vbuf);
 597 }
 598 
 599 /**
 600  * v4l2_m2m_dst_buf_remove_by_buf() - take off exact destination buffer from the
 601  * list of ready buffers
 602  *
 603  * @m2m_ctx: m2m context assigned to the instance given by struct &v4l2_m2m_ctx
 604  * @vbuf: the buffer to be removed
 605  */
 606 static inline void v4l2_m2m_dst_buf_remove_by_buf(struct v4l2_m2m_ctx *m2m_ctx,
 607                                                   struct vb2_v4l2_buffer *vbuf)
 608 {
 609         v4l2_m2m_buf_remove_by_buf(&m2m_ctx->cap_q_ctx, vbuf);
 610 }
 611 
 612 struct vb2_v4l2_buffer *
 613 v4l2_m2m_buf_remove_by_idx(struct v4l2_m2m_queue_ctx *q_ctx, unsigned int idx);
 614 
 615 static inline struct vb2_v4l2_buffer *
 616 v4l2_m2m_src_buf_remove_by_idx(struct v4l2_m2m_ctx *m2m_ctx, unsigned int idx)
 617 {
 618         return v4l2_m2m_buf_remove_by_idx(&m2m_ctx->out_q_ctx, idx);
 619 }
 620 
 621 static inline struct vb2_v4l2_buffer *
 622 v4l2_m2m_dst_buf_remove_by_idx(struct v4l2_m2m_ctx *m2m_ctx, unsigned int idx)
 623 {
 624         return v4l2_m2m_buf_remove_by_idx(&m2m_ctx->cap_q_ctx, idx);
 625 }
 626 
 627 /**
 628  * v4l2_m2m_buf_copy_metadata() - copy buffer metadata from
 629  * the output buffer to the capture buffer
 630  *
 631  * @out_vb: the output buffer that is the source of the metadata.
 632  * @cap_vb: the capture buffer that will receive the metadata.
 633  * @copy_frame_flags: copy the KEY/B/PFRAME flags as well.
 634  *
 635  * This helper function copies the timestamp, timecode (if the TIMECODE
 636  * buffer flag was set), field and the TIMECODE, KEYFRAME, BFRAME, PFRAME
 637  * and TSTAMP_SRC_MASK flags from @out_vb to @cap_vb.
 638  *
 639  * If @copy_frame_flags is false, then the KEYFRAME, BFRAME and PFRAME
 640  * flags are not copied. This is typically needed for encoders that
  641  * set these bits explicitly.
 642  */
 643 void v4l2_m2m_buf_copy_metadata(const struct vb2_v4l2_buffer *out_vb,
 644                                 struct vb2_v4l2_buffer *cap_vb,
 645                                 bool copy_frame_flags);
 646 
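/*
 * Illustrative sketch, not part of this header: propagating timestamps and
 * flags from the source (OUTPUT) buffer to the destination (CAPTURE) buffer,
 * typically done from device_run() before starting the hardware. my_ctx is
 * an assumption used only for this example.
 */
static void my_prepare_job(struct my_ctx *ctx)
{
	struct vb2_v4l2_buffer *src = v4l2_m2m_next_src_buf(ctx->fh_m2m_ctx);
	struct vb2_v4l2_buffer *dst = v4l2_m2m_next_dst_buf(ctx->fh_m2m_ctx);

	v4l2_m2m_buf_copy_metadata(src, dst, true);
}
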
 647 /* v4l2 request helper */
 648 
 649 void v4l2_m2m_request_queue(struct media_request *req);
 650 
 651 /* v4l2 ioctl helpers */
 652 
 653 int v4l2_m2m_ioctl_reqbufs(struct file *file, void *priv,
 654                                 struct v4l2_requestbuffers *rb);
 655 int v4l2_m2m_ioctl_create_bufs(struct file *file, void *fh,
 656                                 struct v4l2_create_buffers *create);
 657 int v4l2_m2m_ioctl_querybuf(struct file *file, void *fh,
 658                                 struct v4l2_buffer *buf);
 659 int v4l2_m2m_ioctl_expbuf(struct file *file, void *fh,
 660                                 struct v4l2_exportbuffer *eb);
 661 int v4l2_m2m_ioctl_qbuf(struct file *file, void *fh,
 662                                 struct v4l2_buffer *buf);
 663 int v4l2_m2m_ioctl_dqbuf(struct file *file, void *fh,
 664                                 struct v4l2_buffer *buf);
 665 int v4l2_m2m_ioctl_prepare_buf(struct file *file, void *fh,
 666                                struct v4l2_buffer *buf);
 667 int v4l2_m2m_ioctl_streamon(struct file *file, void *fh,
 668                                 enum v4l2_buf_type type);
 669 int v4l2_m2m_ioctl_streamoff(struct file *file, void *fh,
 670                                 enum v4l2_buf_type type);
 671 int v4l2_m2m_ioctl_try_encoder_cmd(struct file *file, void *fh,
 672                                    struct v4l2_encoder_cmd *ec);
 673 int v4l2_m2m_ioctl_try_decoder_cmd(struct file *file, void *fh,
 674                                    struct v4l2_decoder_cmd *dc);
 675 int v4l2_m2m_fop_mmap(struct file *file, struct vm_area_struct *vma);
 676 __poll_t v4l2_m2m_fop_poll(struct file *file, poll_table *wait);
 677 
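/*
 * Illustrative sketch, not part of this header: wiring the generic ioctl and
 * file-operation helpers into a hypothetical driver. These helpers expect
 * file->private_data to point to a struct v4l2_fh whose m2m_ctx field has
 * been set up; my_open/my_release are assumptions, and <media/v4l2-ioctl.h>
 * and <media/v4l2-fh.h> are assumed. Format ioctls remain driver-specific
 * and are omitted here.
 */
static const struct v4l2_ioctl_ops my_ioctl_ops = {
	.vidioc_reqbufs		= v4l2_m2m_ioctl_reqbufs,
	.vidioc_querybuf	= v4l2_m2m_ioctl_querybuf,
	.vidioc_qbuf		= v4l2_m2m_ioctl_qbuf,
	.vidioc_dqbuf		= v4l2_m2m_ioctl_dqbuf,
	.vidioc_prepare_buf	= v4l2_m2m_ioctl_prepare_buf,
	.vidioc_create_bufs	= v4l2_m2m_ioctl_create_bufs,
	.vidioc_expbuf		= v4l2_m2m_ioctl_expbuf,
	.vidioc_streamon	= v4l2_m2m_ioctl_streamon,
	.vidioc_streamoff	= v4l2_m2m_ioctl_streamoff,
};

static const struct v4l2_file_operations my_fops = {
	.owner		= THIS_MODULE,
	.open		= my_open,
	.release	= my_release,
	.poll		= v4l2_m2m_fop_poll,
	.unlocked_ioctl	= video_ioctl2,
	.mmap		= v4l2_m2m_fop_mmap,
};
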
 678 #endif /* _MEDIA_V4L2_MEM2MEM_H */
 679 
