FFmpeg
v4l2.c
1 /*
2  * Copyright (c) 2000,2001 Fabrice Bellard
3  * Copyright (c) 2006 Luca Abeni
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Video4Linux2 grab interface
25  *
26  * Part of this file is based on the V4L2 video capture example
27  * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
28  *
29  * Thanks to Michael Niedermayer for providing the mapping between
30  * V4L2_PIX_FMT_* and AV_PIX_FMT_*
31  */
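This indev is normally reached through libavdevice rather than included directly. As a rough orientation, the sketch below shows how a client could open this demuxer through the public API; the device path, option values and error handling are illustrative assumptions, not part of this file.

    #include <libavdevice/avdevice.h>
    #include <libavformat/avformat.h>
    #include <libavutil/dict.h>

    /* Minimal sketch: grab one packet from /dev/video0 via the v4l2 demuxer. */
    int grab_one_packet(void)
    {
        AVFormatContext *fmt_ctx = NULL;
        AVDictionary *opts = NULL;
        AVInputFormat *ifmt;
        AVPacket pkt;
        int ret;

        avdevice_register_all();               /* makes ff_v4l2_demuxer findable */
        ifmt = av_find_input_format("v4l2");
        if (!ifmt)
            return AVERROR_DEMUXER_NOT_FOUND;

        /* Private options handled by the options[] table at the end of this file. */
        av_dict_set(&opts, "video_size", "640x480", 0);
        av_dict_set(&opts, "framerate", "30", 0);

        ret = avformat_open_input(&fmt_ctx, "/dev/video0", ifmt, &opts);
        av_dict_free(&opts);
        if (ret < 0)
            return ret;                        /* failure path of v4l2_read_header() */

        av_init_packet(&pkt);
        ret = av_read_frame(fmt_ctx, &pkt);    /* ends up in v4l2_read_packet() */
        if (ret >= 0)
            av_packet_unref(&pkt);

        avformat_close_input(&fmt_ctx);
        return ret;
    }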
32 
33 #include <stdatomic.h>
34 
35 #include "v4l2-common.h"
36 #include <dirent.h>
37 
38 #if CONFIG_LIBV4L2
39 #include <libv4l2.h>
40 #endif
41 
42 static const int desired_video_buffers = 256;
43 
44 #define V4L_ALLFORMATS 3
45 #define V4L_RAWFORMATS 1
46 #define V4L_COMPFORMATS 2
47 
48 /**
49  * Return timestamps to the user exactly as returned by the kernel
50  */
51 #define V4L_TS_DEFAULT 0
52 /**
53  * Autodetect the kind of timestamps returned by the kernel and convert to
54  * absolute (wall clock) timestamps.
55  */
56 #define V4L_TS_ABS 1
57 /**
58  * Assume kernel timestamps are from the monotonic clock and convert to
59  * absolute timestamps.
60  */
61 #define V4L_TS_MONO2ABS 2
62 
63 /**
64  * Once the kind of timestamps returned by the kernel have been detected,
65  * the value of the timefilter (NULL or not) determines whether a conversion
66  * takes place.
67  */
68 #define V4L_TS_CONVERT_READY V4L_TS_DEFAULT
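Which of these modes is used is selected through the "timestamps"/"ts" private option declared in the options[] table near the end of this file. For example (command line and output file are illustrative):

    ffmpeg -f v4l2 -ts mono2abs -i /dev/video0 out.mkv

or, from the API, av_dict_set(&opts, "timestamps", "mono2abs", 0) before avformat_open_input().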
69 
70 struct video_data {
71  AVClass *class;
72  int fd;
73  int pixelformat; /* V4L2_PIX_FMT_* */
74  int width, height;
75  int frame_size;
76  int interlaced;
77  int top_field_first;
78  int ts_mode;
79  TimeFilter *timefilter;
80  int64_t last_time_m;
81 
82  int buffers;
83  atomic_int buffers_queued;
84  void **buf_start;
85  unsigned int *buf_len;
86  char *standard;
87  v4l2_std_id std_id;
88  int channel;
89  char *pixel_format; /**< Set by a private option. */
90  int list_format; /**< Set by a private option. */
91  int list_standard; /**< Set by a private option. */
92  char *framerate; /**< Set by a private option. */
93 
94  int use_libv4l2;
95  int (*open_f)(const char *file, int oflag, ...);
96  int (*close_f)(int fd);
97  int (*dup_f)(int fd);
98 #ifdef __GLIBC__
99  int (*ioctl_f)(int fd, unsigned long int request, ...);
100 #else
101  int (*ioctl_f)(int fd, int request, ...);
102 #endif
103  ssize_t (*read_f)(int fd, void *buffer, size_t n);
104  void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset);
105  int (*munmap_f)(void *_start, size_t length);
106 };
107 
108 struct buff_data {
109  struct video_data *s;
110  int index;
111 };
112 
113 static int device_open(AVFormatContext *ctx, const char* device_path)
114 {
115  struct video_data *s = ctx->priv_data;
116  struct v4l2_capability cap;
117  int fd;
118  int err;
119  int flags = O_RDWR;
120 
121 #define SET_WRAPPERS(prefix) do { \
122  s->open_f = prefix ## open; \
123  s->close_f = prefix ## close; \
124  s->dup_f = prefix ## dup; \
125  s->ioctl_f = prefix ## ioctl; \
126  s->read_f = prefix ## read; \
127  s->mmap_f = prefix ## mmap; \
128  s->munmap_f = prefix ## munmap; \
129 } while (0)
130 
131  if (s->use_libv4l2) {
132 #if CONFIG_LIBV4L2
133  SET_WRAPPERS(v4l2_);
134 #else
135  av_log(ctx, AV_LOG_ERROR, "libavdevice is not built with libv4l2 support.\n");
136  return AVERROR(EINVAL);
137 #endif
138  } else {
139  SET_WRAPPERS();
140  }
141 
142 #define v4l2_open s->open_f
143 #define v4l2_close s->close_f
144 #define v4l2_dup s->dup_f
145 #define v4l2_ioctl s->ioctl_f
146 #define v4l2_read s->read_f
147 #define v4l2_mmap s->mmap_f
148 #define v4l2_munmap s->munmap_f
149 
150  if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
151  flags |= O_NONBLOCK;
152  }
153 
154  fd = v4l2_open(device_path, flags, 0);
155  if (fd < 0) {
156  err = AVERROR(errno);
157  av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
158  device_path, av_err2str(err));
159  return err;
160  }
161 
162  if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
163  err = AVERROR(errno);
164  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
165  av_err2str(err));
166  goto fail;
167  }
168 
169  av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
170  fd, cap.capabilities);
171 
172  if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
173  av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
174  err = AVERROR(ENODEV);
175  goto fail;
176  }
177 
178  if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
179  av_log(ctx, AV_LOG_ERROR,
180  "The device does not support the streaming I/O method.\n");
181  err = AVERROR(ENOSYS);
182  goto fail;
183  }
184 
185  return fd;
186 
187 fail:
188  v4l2_close(fd);
189  return err;
190 }
191 
192 static int device_init(AVFormatContext *ctx, int *width, int *height,
193  uint32_t pixelformat)
194 {
195  struct video_data *s = ctx->priv_data;
196  struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
197  int res = 0;
198 
199  fmt.fmt.pix.width = *width;
200  fmt.fmt.pix.height = *height;
201  fmt.fmt.pix.pixelformat = pixelformat;
202  fmt.fmt.pix.field = V4L2_FIELD_ANY;
203 
204  /* Some drivers will fail and return EINVAL when the pixelformat
205  is not supported (even if type field is valid and supported) */
206  if (v4l2_ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0)
207  res = AVERROR(errno);
208 
209  if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
210  av_log(ctx, AV_LOG_INFO,
211  "The V4L2 driver changed the video from %dx%d to %dx%d\n",
212  *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
213  *width = fmt.fmt.pix.width;
214  *height = fmt.fmt.pix.height;
215  }
216 
217  if (pixelformat != fmt.fmt.pix.pixelformat) {
218  av_log(ctx, AV_LOG_DEBUG,
219  "The V4L2 driver changed the pixel format "
220  "from 0x%08X to 0x%08X\n",
221  pixelformat, fmt.fmt.pix.pixelformat);
222  res = AVERROR(EINVAL);
223  }
224 
225  if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
226  av_log(ctx, AV_LOG_DEBUG,
227  "The V4L2 driver is using the interlaced mode\n");
228  s->interlaced = 1;
229  }
230 
231  return res;
232 }
233 
234 static int first_field(const struct video_data *s)
235 {
236  int res;
237  v4l2_std_id std;
238 
239  res = v4l2_ioctl(s->fd, VIDIOC_G_STD, &std);
240  if (res < 0)
241  return 0;
242  if (std & V4L2_STD_NTSC)
243  return 0;
244 
245  return 1;
246 }
247 
248 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
249 static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat)
250 {
251  const struct video_data *s = ctx->priv_data;
252  struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };
253 
254  while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
255  switch (vfse.type) {
256  case V4L2_FRMSIZE_TYPE_DISCRETE:
257  av_log(ctx, AV_LOG_INFO, " %ux%u",
258  vfse.discrete.width, vfse.discrete.height);
259  break;
260  case V4L2_FRMSIZE_TYPE_CONTINUOUS:
261  case V4L2_FRMSIZE_TYPE_STEPWISE:
262  av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
263  vfse.stepwise.min_width,
264  vfse.stepwise.max_width,
265  vfse.stepwise.step_width,
266  vfse.stepwise.min_height,
267  vfse.stepwise.max_height,
268  vfse.stepwise.step_height);
269  }
270  vfse.index++;
271  }
272 }
273 #endif
274 
275 static void list_formats(AVFormatContext *ctx, int type)
276 {
277  const struct video_data *s = ctx->priv_data;
278  struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
279 
280  while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) {
281  enum AVCodecID codec_id = ff_fmt_v4l2codec(vfd.pixelformat);
282  enum AVPixelFormat pix_fmt = ff_fmt_v4l2ff(vfd.pixelformat, codec_id);
283 
284  vfd.index++;
285 
286  if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
287  type & V4L_RAWFORMATS) {
288  const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
289  av_log(ctx, AV_LOG_INFO, "Raw : %11s : %20s :",
290  fmt_name ? fmt_name : "Unsupported",
291  vfd.description);
292  } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
293  type & V4L_COMPFORMATS) {
294  const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
295  av_log(ctx, AV_LOG_INFO, "Compressed: %11s : %20s :",
296  desc ? desc->name : "Unsupported",
297  vfd.description);
298  } else {
299  continue;
300  }
301 
302 #ifdef V4L2_FMT_FLAG_EMULATED
303  if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
304  av_log(ctx, AV_LOG_INFO, " Emulated :");
305 #endif
306 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
307  list_framesizes(ctx, vfd.pixelformat);
308 #endif
309  av_log(ctx, AV_LOG_INFO, "\n");
310  }
311 }
312 
313 static void list_standards(AVFormatContext *ctx)
314 {
315  int ret;
316  struct video_data *s = ctx->priv_data;
317  struct v4l2_standard standard;
318 
319  if (s->std_id == 0)
320  return;
321 
322  for (standard.index = 0; ; standard.index++) {
323  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
324  ret = AVERROR(errno);
325  if (ret == AVERROR(EINVAL)) {
326  break;
327  } else {
328  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
329  return;
330  }
331  }
332  av_log(ctx, AV_LOG_INFO, "%2d, %16"PRIx64", %s\n",
333  standard.index, (uint64_t)standard.id, standard.name);
334  }
335 }
336 
337 static int mmap_init(AVFormatContext *ctx)
338 {
339  int i, res;
340  struct video_data *s = ctx->priv_data;
341  struct v4l2_requestbuffers req = {
342  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
343  .count = desired_video_buffers,
344  .memory = V4L2_MEMORY_MMAP
345  };
346 
347  if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
348  res = AVERROR(errno);
349  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
350  return res;
351  }
352 
353  if (req.count < 2) {
354  av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
355  return AVERROR(ENOMEM);
356  }
357  s->buffers = req.count;
358  s->buf_start = av_malloc_array(s->buffers, sizeof(void *));
359  if (!s->buf_start) {
360  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
361  return AVERROR(ENOMEM);
362  }
363  s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int));
364  if (!s->buf_len) {
365  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
366  av_freep(&s->buf_start);
367  return AVERROR(ENOMEM);
368  }
369 
370  for (i = 0; i < req.count; i++) {
371  struct v4l2_buffer buf = {
372  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
373  .index = i,
374  .memory = V4L2_MEMORY_MMAP
375  };
376  if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
377  res = AVERROR(errno);
378  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
379  return res;
380  }
381 
382  s->buf_len[i] = buf.length;
383  if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
384  av_log(ctx, AV_LOG_ERROR,
385  "buf_len[%d] = %d < expected frame size %d\n",
386  i, s->buf_len[i], s->frame_size);
387  return AVERROR(ENOMEM);
388  }
389  s->buf_start[i] = v4l2_mmap(NULL, buf.length,
390  PROT_READ | PROT_WRITE, MAP_SHARED,
391  s->fd, buf.m.offset);
392 
393  if (s->buf_start[i] == MAP_FAILED) {
394  res = AVERROR(errno);
395  av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
396  return res;
397  }
398  }
399 
400  return 0;
401 }
402 
403 static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf)
404 {
405  int res = 0;
406 
407  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, buf) < 0) {
408  res = AVERROR(errno);
409  av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
410  } else {
411  atomic_fetch_add(&s->buffers_queued, 1);
412  }
413 
414  return res;
415 }
416 
417 static void mmap_release_buffer(void *opaque, uint8_t *data)
418 {
419  struct v4l2_buffer buf = { 0 };
420  struct buff_data *buf_descriptor = opaque;
421  struct video_data *s = buf_descriptor->s;
422 
423  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
424  buf.memory = V4L2_MEMORY_MMAP;
425  buf.index = buf_descriptor->index;
426  av_free(buf_descriptor);
427 
428  enqueue_buffer(s, &buf);
429 }
430 
431 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
432 static int64_t av_gettime_monotonic(void)
433 {
434  return av_gettime_relative();
435 }
436 #endif
437 
438 static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
439 {
440  struct video_data *s = ctx->priv_data;
441  int64_t now;
442 
443  now = av_gettime();
444  if (s->ts_mode == V4L_TS_ABS &&
445  ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
446  av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
447  s->ts_mode = V4L_TS_CONVERT_READY;
448  return 0;
449  }
450 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
451  if (ctx->streams[0]->avg_frame_rate.num) {
452  now = av_gettime_monotonic();
453  if (s->ts_mode == V4L_TS_MONO2ABS ||
454  (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
455  AVRational tb = {AV_TIME_BASE, 1};
456  int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
457  av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
458  /* microseconds instead of seconds, MHz instead of Hz */
459  s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
460  if (!s->timefilter)
461  return AVERROR(ENOMEM);
462  s->ts_mode = V4L_TS_CONVERT_READY;
463  return 0;
464  }
465  }
466 #endif
467  av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
468  return AVERROR(EIO);
469 }
470 
471 static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
472 {
473  struct video_data *s = ctx->priv_data;
474 
475  if (s->ts_mode) {
476  int r = init_convert_timestamp(ctx, *ts);
477  if (r < 0)
478  return r;
479  }
480 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
481  if (s->timefilter) {
482  int64_t nowa = av_gettime();
483  int64_t nowm = av_gettime_monotonic();
484  ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
485  s->last_time_m = nowm;
486  *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
487  }
488 #endif
489  return 0;
490 }
491 
492 static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
493 {
494  struct video_data *s = ctx->priv_data;
495  struct v4l2_buffer buf = {
496  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
497  .memory = V4L2_MEMORY_MMAP
498  };
499  struct timeval buf_ts;
500  int res;
501 
502  pkt->size = 0;
503 
504  /* FIXME: Some special treatment might be needed in case of loss of signal... */
505  while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
506  if (res < 0) {
507  if (errno == EAGAIN)
508  return AVERROR(EAGAIN);
509 
510  res = AVERROR(errno);
511  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n",
512  av_err2str(res));
513  return res;
514  }
515 
516  buf_ts = buf.timestamp;
517 
518  if (buf.index >= s->buffers) {
519  av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
520  return AVERROR(EINVAL);
521  }
522  atomic_fetch_add(&s->buffers_queued, -1);
523  // always keep at least one buffer queued
524  av_assert0(atomic_load(&s->buffers_queued) >= 1);
525 
526 #ifdef V4L2_BUF_FLAG_ERROR
527  if (buf.flags & V4L2_BUF_FLAG_ERROR) {
528  av_log(ctx, AV_LOG_WARNING,
529  "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
530  buf.bytesused);
531  buf.bytesused = 0;
532  } else
533 #endif
534  {
535  /* CPIA is a compressed format and we don't know the exact number of bytes
536  * used by a frame, so set it here as the driver announces it. */
537  if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
538  s->frame_size = buf.bytesused;
539 
540  if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
541  av_log(ctx, AV_LOG_ERROR,
542  "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
543  buf.bytesused, s->frame_size, buf.flags);
544  enqueue_buffer(s, &buf);
545  return AVERROR_INVALIDDATA;
546  }
547  }
548 
549  /* Image is at s->buff_start[buf.index] */
550  if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
551  /* when we start getting low on queued buffers, fall back on copying data */
552  res = av_new_packet(pkt, buf.bytesused);
553  if (res < 0) {
554  av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
555  enqueue_buffer(s, &buf);
556  return res;
557  }
558  memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);
559 
560  res = enqueue_buffer(s, &buf);
561  if (res) {
562  av_packet_unref(pkt);
563  return res;
564  }
565  } else {
566  struct buff_data *buf_descriptor;
567 
568  pkt->data = s->buf_start[buf.index];
569  pkt->size = buf.bytesused;
570 
571  buf_descriptor = av_malloc(sizeof(struct buff_data));
572  if (!buf_descriptor) {
573  /* Something went wrong... Since av_malloc() failed, we cannot even
574  * allocate a buffer for memcpying into it
575  */
576  av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
577  enqueue_buffer(s, &buf);
578 
579  return AVERROR(ENOMEM);
580  }
581  buf_descriptor->index = buf.index;
582  buf_descriptor->s = s;
583 
584  pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
585  buf_descriptor, 0);
586  if (!pkt->buf) {
587  av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
588  enqueue_buffer(s, &buf);
589  av_freep(&buf_descriptor);
590  return AVERROR(ENOMEM);
591  }
592  }
593  pkt->pts = buf_ts.tv_sec * INT64_C(1000000) + buf_ts.tv_usec;
594  convert_timestamp(ctx, &pkt->pts);
595 
596  return pkt->size;
597 }
598 
599 static int mmap_start(AVFormatContext *ctx)
600 {
601  struct video_data *s = ctx->priv_data;
602  enum v4l2_buf_type type;
603  int i, res;
604 
605  for (i = 0; i < s->buffers; i++) {
606  struct v4l2_buffer buf = {
607  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
608  .index = i,
609  .memory = V4L2_MEMORY_MMAP
610  };
611 
612  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
613  res = AVERROR(errno);
614  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
615  av_err2str(res));
616  return res;
617  }
618  }
619  atomic_store(&s->buffers_queued, s->buffers);
620 
621  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
622  if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
623  res = AVERROR(errno);
624  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
625  av_err2str(res));
626  return res;
627  }
628 
629  return 0;
630 }
631 
632 static void mmap_close(struct video_data *s)
633 {
634  enum v4l2_buf_type type;
635  int i;
636 
637  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
638  /* We do not check for the result, because we could
639  * not do anything about it anyway...
640  */
641  v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
642  for (i = 0; i < s->buffers; i++) {
643  v4l2_munmap(s->buf_start[i], s->buf_len[i]);
644  }
645  av_freep(&s->buf_start);
646  av_freep(&s->buf_len);
647 }
648 
649 static int v4l2_set_parameters(AVFormatContext *ctx)
650 {
651  struct video_data *s = ctx->priv_data;
652  struct v4l2_standard standard = { 0 };
653  struct v4l2_streamparm streamparm = { 0 };
654  struct v4l2_fract *tpf;
655  AVRational framerate_q = { 0 };
656  int i, ret;
657 
658  if (s->framerate &&
659  (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
660  av_log(ctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
661  s->framerate);
662  return ret;
663  }
664 
665  if (s->standard) {
666  if (s->std_id) {
667  ret = 0;
668  av_log(ctx, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
669  /* set tv standard */
670  for (i = 0; ; i++) {
671  standard.index = i;
672  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
673  ret = AVERROR(errno);
674  break;
675  }
676  if (!av_strcasecmp(standard.name, s->standard))
677  break;
678  }
679  if (ret < 0) {
680  av_log(ctx, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
681  return ret;
682  }
683 
684  if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
685  ret = AVERROR(errno);
686  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
687  return ret;
688  }
689  } else {
690  av_log(ctx, AV_LOG_WARNING,
691  "This device does not support any standard\n");
692  }
693  }
694 
695  /* get standard */
696  if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
697  tpf = &standard.frameperiod;
698  for (i = 0; ; i++) {
699  standard.index = i;
700  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
701  ret = AVERROR(errno);
702  if (ret == AVERROR(EINVAL)
703 #ifdef ENODATA
704  || ret == AVERROR(ENODATA)
705 #endif
706  ) {
707  tpf = &streamparm.parm.capture.timeperframe;
708  break;
709  }
710  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
711  return ret;
712  }
713  if (standard.id == s->std_id) {
714  av_log(ctx, AV_LOG_DEBUG,
715  "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
716  standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
717  break;
718  }
719  }
720  } else {
721  tpf = &streamparm.parm.capture.timeperframe;
722  }
723 
724  streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
725  if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
726  ret = AVERROR(errno);
727  av_log(ctx, AV_LOG_WARNING, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
728  } else if (framerate_q.num && framerate_q.den) {
729  if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
730  tpf = &streamparm.parm.capture.timeperframe;
731 
732  av_log(ctx, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
733  framerate_q.den, framerate_q.num);
734  tpf->numerator = framerate_q.den;
735  tpf->denominator = framerate_q.num;
736 
737  if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
738  ret = AVERROR(errno);
739  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n",
740  av_err2str(ret));
741  return ret;
742  }
743 
744  if (framerate_q.num != tpf->denominator ||
745  framerate_q.den != tpf->numerator) {
746  av_log(ctx, AV_LOG_INFO,
747  "The driver changed the time per frame from "
748  "%d/%d to %d/%d\n",
749  framerate_q.den, framerate_q.num,
750  tpf->numerator, tpf->denominator);
751  }
752  } else {
753  av_log(ctx, AV_LOG_WARNING,
754  "The driver does not permit changing the time per frame\n");
755  }
756  }
757  if (tpf->denominator > 0 && tpf->numerator > 0) {
758  ctx->streams[0]->avg_frame_rate.num = tpf->denominator;
759  ctx->streams[0]->avg_frame_rate.den = tpf->numerator;
760  ctx->streams[0]->r_frame_rate = ctx->streams[0]->avg_frame_rate;
761  } else
762  av_log(ctx, AV_LOG_WARNING, "Time per frame unknown\n");
763 
764  return 0;
765 }
766 
767 static int device_try_init(AVFormatContext *ctx,
768  enum AVPixelFormat pix_fmt,
769  int *width,
770  int *height,
771  uint32_t *desired_format,
772  enum AVCodecID *codec_id)
773 {
774  int ret, i;
775 
776  *desired_format = ff_fmt_ff2v4l(pix_fmt, ctx->video_codec_id);
777 
778  if (*desired_format) {
779  ret = device_init(ctx, width, height, *desired_format);
780  if (ret < 0) {
781  *desired_format = 0;
782  if (ret != AVERROR(EINVAL))
783  return ret;
784  }
785  }
786 
787  if (!*desired_format) {
788  for (i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
789  if (ctx->video_codec_id == AV_CODEC_ID_NONE ||
790  ff_fmt_conversion_table[i].codec_id == ctx->video_codec_id) {
791  av_log(ctx, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
792  avcodec_get_name(ff_fmt_conversion_table[i].codec_id),
793  (char *)av_x_if_null(av_get_pix_fmt_name(ff_fmt_conversion_table[i].ff_fmt), "none"));
794 
795  *desired_format = ff_fmt_conversion_table[i].v4l2_fmt;
796  ret = device_init(ctx, width, height, *desired_format);
797  if (ret >= 0)
798  break;
799  else if (ret != AVERROR(EINVAL))
800  return ret;
801  *desired_format = 0;
802  }
803  }
804 
805  if (*desired_format == 0) {
806  av_log(ctx, AV_LOG_ERROR, "Cannot find a proper format for "
807  "codec '%s' (id %d), pixel format '%s' (id %d)\n",
808  avcodec_get_name(ctx->video_codec_id), ctx->video_codec_id,
809  (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
810  ret = AVERROR(EINVAL);
811  }
812  }
813 
814  *codec_id = ff_fmt_v4l2codec(*desired_format);
815  av_assert0(*codec_id != AV_CODEC_ID_NONE);
816  return ret;
817 }
818 
819 static int v4l2_read_probe(const AVProbeData *p)
820 {
821  if (av_strstart(p->filename, "/dev/video", NULL))
822  return AVPROBE_SCORE_MAX - 1;
823  return 0;
824 }
825 
825 
826 static int v4l2_read_header(AVFormatContext *ctx)
827 {
828  struct video_data *s = ctx->priv_data;
829  AVStream *st;
830  int res = 0;
831  uint32_t desired_format;
832  enum AVCodecID codec_id = AV_CODEC_ID_NONE;
833  enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
834  struct v4l2_input input = { 0 };
835 
836  st = avformat_new_stream(ctx, NULL);
837  if (!st)
838  return AVERROR(ENOMEM);
839 
840 #if CONFIG_LIBV4L2
841  /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL
842  and errors will get sent to stderr */
843  if (s->use_libv4l2)
844  v4l2_log_file = fopen("/dev/null", "w");
845 #endif
846 
847  s->fd = device_open(ctx, ctx->url);
848  if (s->fd < 0)
849  return s->fd;
850 
851  if (s->channel != -1) {
852  /* set video input */
853  av_log(ctx, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
854  if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
855  res = AVERROR(errno);
856  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
857  goto fail;
858  }
859  } else {
860  /* get current video input */
861  if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
862  res = AVERROR(errno);
863  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
864  goto fail;
865  }
866  }
867 
868  /* enum input */
869  input.index = s->channel;
870  if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
871  res = AVERROR(errno);
872  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
873  goto fail;
874  }
875  s->std_id = input.std;
876  av_log(ctx, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: %"PRIx64"\n",
877  s->channel, input.name, (uint64_t)input.std);
878 
879  if (s->list_format) {
880  list_formats(ctx, s->list_format);
881  res = AVERROR_EXIT;
882  goto fail;
883  }
884 
885  if (s->list_standard) {
886  list_standards(ctx);
887  res = AVERROR_EXIT;
888  goto fail;
889  }
890 
891  avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
892 
893  if (s->pixel_format) {
894  const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name(s->pixel_format);
895 
896  if (desc)
897  ctx->video_codec_id = desc->id;
898 
899  pix_fmt = av_get_pix_fmt(s->pixel_format);
900 
901  if (pix_fmt == AV_PIX_FMT_NONE && !desc) {
902  av_log(ctx, AV_LOG_ERROR, "No such input format: %s.\n",
903  s->pixel_format);
904 
905  res = AVERROR(EINVAL);
906  goto fail;
907  }
908  }
909 
910  if (!s->width && !s->height) {
911  struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
912 
913  av_log(ctx, AV_LOG_VERBOSE,
914  "Querying the device for the current frame size\n");
915  if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
916  res = AVERROR(errno);
917  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n",
918  av_err2str(res));
919  goto fail;
920  }
921 
922  s->width = fmt.fmt.pix.width;
923  s->height = fmt.fmt.pix.height;
924  av_log(ctx, AV_LOG_VERBOSE,
925  "Setting frame size to %dx%d\n", s->width, s->height);
926  }
927 
928  res = device_try_init(ctx, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
929  if (res < 0)
930  goto fail;
931 
932  /* If no pixel_format was specified, the codec_id was not known up
933  * until now. Set video_codec_id in the context, as codec_id will
934  * not be available outside this function
935  */
936  if (codec_id != AV_CODEC_ID_NONE && ctx->video_codec_id == AV_CODEC_ID_NONE)
937  ctx->video_codec_id = codec_id;
938 
939  if ((res = av_image_check_size(s->width, s->height, 0, ctx)) < 0)
940  goto fail;
941 
942  s->pixelformat = desired_format;
943 
944  if ((res = v4l2_set_parameters(ctx)) < 0)
945  goto fail;
946 
947  st->codecpar->format = ff_fmt_v4l2ff(desired_format, codec_id);
948  if (st->codecpar->format != AV_PIX_FMT_NONE)
949  s->frame_size = av_image_get_buffer_size(st->codecpar->format,
950  s->width, s->height, 1);
951 
952  if ((res = mmap_init(ctx)) ||
953  (res = mmap_start(ctx)) < 0)
954  goto fail;
955 
956  s->top_field_first = first_field(s);
957 
958  st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
959  st->codecpar->codec_id = codec_id;
960  if (codec_id == AV_CODEC_ID_RAWVIDEO)
961  st->codecpar->codec_tag =
962  avcodec_pix_fmt_to_codec_tag(st->codecpar->format);
963  else if (codec_id == AV_CODEC_ID_H264) {
964  st->need_parsing = AVSTREAM_PARSE_FULL_ONCE;
965  }
966  if (desired_format == V4L2_PIX_FMT_YVU420)
967  st->codecpar->codec_tag = MKTAG('Y', 'V', '1', '2');
968  else if (desired_format == V4L2_PIX_FMT_YVU410)
969  st->codecpar->codec_tag = MKTAG('Y', 'V', 'U', '9');
970  st->codecpar->width = s->width;
971  st->codecpar->height = s->height;
972  if (st->avg_frame_rate.den)
973  st->codecpar->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;
974 
975  return 0;
976 
977 fail:
978  v4l2_close(s->fd);
979  return res;
980 }
981 
982 static int v4l2_read_packet(AVFormatContext *ctx, AVPacket *pkt)
983 {
984 #if FF_API_CODED_FRAME && FF_API_LAVF_AVCTX
985 FF_DISABLE_DEPRECATION_WARNINGS
986  struct video_data *s = ctx->priv_data;
987  AVFrame *frame = ctx->streams[0]->codec->coded_frame;
988 FF_ENABLE_DEPRECATION_WARNINGS
989 #endif
990  int res;
991 
992  if ((res = mmap_read_frame(ctx, pkt)) < 0) {
993  return res;
994  }
995 
996 #if FF_API_CODED_FRAME && FF_API_LAVF_AVCTX
997 FF_DISABLE_DEPRECATION_WARNINGS
998  if (frame && s->interlaced) {
999  frame->interlaced_frame = 1;
1000  frame->top_field_first = s->top_field_first;
1001  }
1002 FF_ENABLE_DEPRECATION_WARNINGS
1003 #endif
1004 
1005  return pkt->size;
1006 }
1007 
1008 static int v4l2_read_close(AVFormatContext *ctx)
1009 {
1010  struct video_data *s = ctx->priv_data;
1011 
1012  if (atomic_load(&s->buffers_queued) != s->buffers)
1013  av_log(ctx, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
1014  "close.\n");
1015 
1016  mmap_close(s);
1017 
1018  v4l2_close(s->fd);
1019  return 0;
1020 }
1021 
1022 static int v4l2_is_v4l_dev(const char *name)
1023 {
1024  return !strncmp(name, "video", 5) ||
1025  !strncmp(name, "radio", 5) ||
1026  !strncmp(name, "vbi", 3) ||
1027  !strncmp(name, "v4l-subdev", 10);
1028 }
1029 
1030 static int v4l2_get_device_list(AVFormatContext *ctx, AVDeviceInfoList *device_list)
1031 {
1032  struct video_data *s = ctx->priv_data;
1033  DIR *dir;
1034  struct dirent *entry;
1035  AVDeviceInfo *device = NULL;
1036  struct v4l2_capability cap;
1037  int ret = 0;
1038 
1039  if (!device_list)
1040  return AVERROR(EINVAL);
1041 
1042  dir = opendir("/dev");
1043  if (!dir) {
1044  ret = AVERROR(errno);
1045  av_log(ctx, AV_LOG_ERROR, "Couldn't open the directory: %s\n", av_err2str(ret));
1046  return ret;
1047  }
1048  while ((entry = readdir(dir))) {
1049  char device_name[256];
1050 
1051  if (!v4l2_is_v4l_dev(entry->d_name))
1052  continue;
1053 
1054  snprintf(device_name, sizeof(device_name), "/dev/%s", entry->d_name);
1055  if ((s->fd = device_open(ctx, device_name)) < 0)
1056  continue;
1057 
1058  if (v4l2_ioctl(s->fd, VIDIOC_QUERYCAP, &cap) < 0) {
1059  ret = AVERROR(errno);
1060  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n", av_err2str(ret));
1061  goto fail;
1062  }
1063 
1064  device = av_mallocz(sizeof(AVDeviceInfo));
1065  if (!device) {
1066  ret = AVERROR(ENOMEM);
1067  goto fail;
1068  }
1069  device->device_name = av_strdup(device_name);
1070  device->device_description = av_strdup(cap.card);
1071  if (!device->device_name || !device->device_description) {
1072  ret = AVERROR(ENOMEM);
1073  goto fail;
1074  }
1075 
1076  if ((ret = av_dynarray_add_nofree(&device_list->devices,
1077  &device_list->nb_devices, device)) < 0)
1078  goto fail;
1079 
1080  v4l2_close(s->fd);
1081  s->fd = -1;
1082  continue;
1083 
1084  fail:
1085  if (device) {
1086  av_freep(&device->device_name);
1087  av_freep(&device->device_description);
1088  av_freep(&device);
1089  }
1090  if (s->fd >= 0)
1091  v4l2_close(s->fd);
1092  s->fd = -1;
1093  break;
1094  }
1095  closedir(dir);
1096  return ret;
1097 }
1098 
1099 #define OFFSET(x) offsetof(struct video_data, x)
1100 #define DEC AV_OPT_FLAG_DECODING_PARAM
1101 
1102 static const AVOption options[] = {
1103  { "standard", "set TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC },
1104  { "channel", "set TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = -1 }, -1, INT_MAX, DEC },
1105  { "video_size", "set frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC },
1106  { "pixel_format", "set preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1107  { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1108  { "framerate", "set frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1109 
1110  { "list_formats", "list available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, "list_formats" },
1111  { "all", "show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1112  { "raw", "show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1113  { "compressed", "show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1114 
1115  { "list_standards", "list supported standards and exit", OFFSET(list_standard), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, DEC, "list_standards" },
1116  { "all", "show all supported standards", OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, "list_standards" },
1117 
1118  { "timestamps", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" },
1119  { "ts", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" },
1120  { "default", "use timestamps from the kernel", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_DEFAULT }, 0, 2, DEC, "timestamps" },
1121  { "abs", "use absolute timestamps (wall clock)", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_ABS }, 0, 2, DEC, "timestamps" },
1122  { "mono2abs", "force conversion from monotonic to absolute timestamps", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" },
1123  { "use_libv4l2", "use libv4l2 (v4l-utils) conversion functions", OFFSET(use_libv4l2), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, DEC },
1124  { NULL },
1125 };
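For reference, these options map onto command lines such as the following (device path, input format and sizes are illustrative examples, not defaults):

    ffmpeg -f v4l2 -list_formats all -i /dev/video0
    ffmpeg -f v4l2 -list_standards all -i /dev/video0
    ffmpeg -f v4l2 -input_format mjpeg -video_size 1280x720 -framerate 30 -i /dev/video0 out.mkv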
1126 
1127 static const AVClass v4l2_class = {
1128  .class_name = "V4L2 indev",
1129  .item_name = av_default_item_name,
1130  .option = options,
1131  .version = LIBAVUTIL_VERSION_INT,
1132  .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
1133 };
1134 
1135 AVInputFormat ff_v4l2_demuxer = {
1136  .name = "video4linux2,v4l2",
1137  .long_name = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
1138  .priv_data_size = sizeof(struct video_data),
1139  .read_probe = v4l2_read_probe,
1140  .read_header = v4l2_read_header,
1141  .read_packet = v4l2_read_packet,
1142  .read_close = v4l2_read_close,
1143  .get_device_list = v4l2_get_device_list,
1144  .flags = AVFMT_NOFILE,
1145  .priv_class = &v4l2_class,
1146 };