FFmpeg
v4l2.c
1 /*
2  * Copyright (c) 2000,2001 Fabrice Bellard
3  * Copyright (c) 2006 Luca Abeni
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Video4Linux2 grab interface
25  *
26  * Part of this file is based on the V4L2 video capture example
27  * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
28  *
29  * Thanks to Michael Niedermayer for providing the mapping between
30  * V4L2_PIX_FMT_* and AV_PIX_FMT_*
31  */
32 
33 #undef __STRICT_ANSI__ //workaround due to broken kernel headers
34 #include "config.h"
35 #include "libavformat/internal.h"
36 #include <unistd.h>
37 #include <fcntl.h>
38 #include <sys/ioctl.h>
39 #include <sys/mman.h>
40 #include <sys/time.h>
41 #if HAVE_SYS_VIDEOIO_H
42 #include <sys/videoio.h>
43 #else
44 #if HAVE_ASM_TYPES_H
45 #include <asm/types.h>
46 #endif
47 #include <linux/videodev2.h>
48 #endif
49 #include "libavutil/avassert.h"
50 #include "libavutil/imgutils.h"
51 #include "libavutil/log.h"
52 #include "libavutil/opt.h"
53 #include "avdevice.h"
54 #include "timefilter.h"
55 #include "libavutil/parseutils.h"
56 #include "libavutil/pixdesc.h"
57 #include "libavutil/avstring.h"
58 
59 #if CONFIG_LIBV4L2
60 #include <libv4l2.h>
61 #else
62 #define v4l2_open open
63 #define v4l2_close close
64 #define v4l2_dup dup
65 #define v4l2_ioctl ioctl
66 #define v4l2_read read
67 #define v4l2_mmap mmap
68 #define v4l2_munmap munmap
69 #endif
70 
71 static const int desired_video_buffers = 256;
72 
73 #define V4L_ALLFORMATS 3
74 #define V4L_RAWFORMATS 1
75 #define V4L_COMPFORMATS 2
76 
77 /**
78  * Return timestamps to the user exactly as returned by the kernel
79  */
80 #define V4L_TS_DEFAULT 0
81 /**
82  * Autodetect the kind of timestamps returned by the kernel and convert to
83  * absolute (wall clock) timestamps.
84  */
85 #define V4L_TS_ABS 1
86 /**
87  * Assume kernel timestamps are from the monotonic clock and convert to
88  * absolute timestamps.
89  */
90 #define V4L_TS_MONO2ABS 2
91 
92 /**
93  * Once the kind of timestamps returned by the kernel have been detected,
94  * the value of the timefilter (NULL or not) determines whether a conversion
95  * takes place.
96  */
97 #define V4L_TS_CONVERT_READY V4L_TS_DEFAULT
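/*
 * The timestamp modes above are exposed through the "timestamps" / "ts"
 * private option declared in the options[] table at the end of this file.
 * A hedged usage sketch (the device path is only an example):
 *
 *     ffmpeg -f v4l2 -ts mono2abs -i /dev/video0 out.mkv
 *
 * forces the monotonic-to-wallclock conversion described above.
 */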
98 
99 struct video_data {
100  AVClass *class;
101  int fd;
102  int frame_format; /* V4L2_PIX_FMT_* */
103  int width, height;
104  int frame_size;
105  int interlaced;
106  int top_field_first;
107  int ts_mode;
108  TimeFilter *timefilter;
109  int64_t last_time_m;
110 
111  int buffers;
112  void **buf_start;
113  unsigned int *buf_len;
114  int *buf_dequeued;
115  char *standard;
116  v4l2_std_id std_id;
117  int channel;
118  char *pixel_format; /**< Set by a private option. */
119  int list_format; /**< Set by a private option. */
120  int list_standard; /**< Set by a private option. */
121  char *framerate; /**< Set by a private option. */
122 };
123 
124 struct buff_data {
125  int index;
126  int fd;
127  int *buf_dequeued;
128 };
129 
130 struct fmt_map {
131  enum AVPixelFormat ff_fmt;
132  enum AVCodecID codec_id;
133  uint32_t v4l2_fmt;
134 };
135 
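/*
 * Each entry of the table below ties a V4L2 pixel format to the AVPixelFormat
 * and AVCodecID it is exported as.  Raw formats use AV_CODEC_ID_RAWVIDEO plus
 * a concrete pixel format; compressed formats (MJPEG, JPEG, H.264, CPIA) use
 * AV_PIX_FMT_NONE and a real codec id.  fmt_ff2v4l(), fmt_v4l2ff() and
 * fmt_v4l2codec() further down are plain linear scans over this table.
 */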
136 static struct fmt_map fmt_conversion_table[] = {
137  //ff_fmt codec_id v4l2_fmt
138  { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV420 },
139  { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU420 },
140  { AV_PIX_FMT_YUV422P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV422P },
141  { AV_PIX_FMT_YUYV422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUYV },
142  { AV_PIX_FMT_UYVY422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_UYVY },
143  { AV_PIX_FMT_YUV411P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV411P },
144  { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV410 },
145  { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU410 },
146  { AV_PIX_FMT_RGB555LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555 },
147  { AV_PIX_FMT_RGB555BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555X },
148  { AV_PIX_FMT_RGB565LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565 },
149  { AV_PIX_FMT_RGB565BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565X },
150  { AV_PIX_FMT_BGR24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR24 },
151  { AV_PIX_FMT_RGB24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB24 },
152  { AV_PIX_FMT_BGR0, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR32 },
153  { AV_PIX_FMT_0RGB, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB32 },
154  { AV_PIX_FMT_GRAY8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_GREY },
155 #ifdef V4L2_PIX_FMT_Y16
156  { AV_PIX_FMT_GRAY16LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_Y16 },
157 #endif
158  { AV_PIX_FMT_NV12, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_NV12 },
159  { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_MJPEG },
160  { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_JPEG },
161 #ifdef V4L2_PIX_FMT_H264
162  { AV_PIX_FMT_NONE, AV_CODEC_ID_H264, V4L2_PIX_FMT_H264 },
163 #endif
164 #ifdef V4L2_PIX_FMT_CPIA1
165  { AV_PIX_FMT_NONE, AV_CODEC_ID_CPIA, V4L2_PIX_FMT_CPIA1 },
166 #endif
167 };
168 
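/*
 * device_open(): open the node named in ctx->filename (adding O_NONBLOCK when
 * AVFMT_FLAG_NONBLOCK is set) and use VIDIOC_QUERYCAP to verify that it is a
 * video capture device supporting streaming I/O.  Returns the file descriptor
 * on success, a negative AVERROR code otherwise.
 */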
169 static int device_open(AVFormatContext *ctx)
170 {
171  struct v4l2_capability cap;
172  int fd;
173  int ret;
174  int flags = O_RDWR;
175 
176  if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
177  flags |= O_NONBLOCK;
178  }
179 
180  fd = v4l2_open(ctx->filename, flags, 0);
181  if (fd < 0) {
182  ret = AVERROR(errno);
183  av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
184  ctx->filename, av_err2str(ret));
185  return ret;
186  }
187 
188  if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
189  ret = AVERROR(errno);
190  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
191  av_err2str(ret));
192  goto fail;
193  }
194 
195  av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
196  fd, cap.capabilities);
197 
198  if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
199  av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
200  ret = AVERROR(ENODEV);
201  goto fail;
202  }
203 
204  if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
205  av_log(ctx, AV_LOG_ERROR,
206  "The device does not support the streaming I/O method.\n");
207  ret = AVERROR(ENOSYS);
208  goto fail;
209  }
210 
211  return fd;
212 
213 fail:
214  v4l2_close(fd);
215  return ret;
216 }
217 
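/*
 * device_init(): request the desired frame size and pixel format with
 * VIDIOC_S_FMT.  A size adjusted by the driver is accepted and written back
 * to *width / *height; a changed pixel format is reported as AVERROR(EINVAL)
 * so that the caller can fall back to another entry of the conversion table.
 */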
218 static int device_init(AVFormatContext *ctx, int *width, int *height,
219  uint32_t pix_fmt)
220 {
221  struct video_data *s = ctx->priv_data;
222  int fd = s->fd;
223  struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
224  struct v4l2_pix_format *pix = &fmt.fmt.pix;
225 
226  int res = 0;
227 
228  pix->width = *width;
229  pix->height = *height;
230  pix->pixelformat = pix_fmt;
231  pix->field = V4L2_FIELD_ANY;
232 
233  if (v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
234  res = AVERROR(errno);
235 
236  if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
237  av_log(ctx, AV_LOG_INFO,
238  "The V4L2 driver changed the video from %dx%d to %dx%d\n",
239  *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
240  *width = fmt.fmt.pix.width;
241  *height = fmt.fmt.pix.height;
242  }
243 
244  if (pix_fmt != fmt.fmt.pix.pixelformat) {
245  av_log(ctx, AV_LOG_DEBUG,
246  "The V4L2 driver changed the pixel format "
247  "from 0x%08X to 0x%08X\n",
248  pix_fmt, fmt.fmt.pix.pixelformat);
249  res = AVERROR(EINVAL);
250  }
251 
252  if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
253  av_log(ctx, AV_LOG_DEBUG,
254  "The V4L2 driver is using the interlaced mode\n");
255  s->interlaced = 1;
256  }
257 
258  return res;
259 }
260 
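/*
 * first_field(): returns 1 when the configured standard delivers the top
 * field first, 0 for NTSC or when the standard cannot be queried.
 */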
261 static int first_field(int fd)
262 {
263  int res;
264  v4l2_std_id std;
265 
266  res = v4l2_ioctl(fd, VIDIOC_G_STD, &std);
267  if (res < 0) {
268  return 0;
269  }
270  if (std & V4L2_STD_NTSC) {
271  return 0;
272  }
273 
274  return 1;
275 }
276 
277 static uint32_t fmt_ff2v4l(enum AVPixelFormat pix_fmt, enum AVCodecID codec_id)
278 {
279  int i;
280 
281  for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
282  if ((codec_id == AV_CODEC_ID_NONE ||
283  fmt_conversion_table[i].codec_id == codec_id) &&
284  (pix_fmt == AV_PIX_FMT_NONE ||
285  fmt_conversion_table[i].ff_fmt == pix_fmt)) {
286  return fmt_conversion_table[i].v4l2_fmt;
287  }
288  }
289 
290  return 0;
291 }
292 
293 static enum AVPixelFormat fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id)
294 {
295  int i;
296 
297  for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
298  if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt &&
299  fmt_conversion_table[i].codec_id == codec_id) {
300  return fmt_conversion_table[i].ff_fmt;
301  }
302  }
303 
304  return AV_PIX_FMT_NONE;
305 }
306 
307 static enum AVCodecID fmt_v4l2codec(uint32_t v4l2_fmt)
308 {
309  int i;
310 
311  for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
312  if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt) {
313  return fmt_conversion_table[i].codec_id;
314  }
315  }
316 
317  return AV_CODEC_ID_NONE;
318 }
319 
320 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
321 static void list_framesizes(AVFormatContext *ctx, int fd, uint32_t pixelformat)
322 {
323  struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };
324 
325  while(!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
326  switch (vfse.type) {
327  case V4L2_FRMSIZE_TYPE_DISCRETE:
328  av_log(ctx, AV_LOG_INFO, " %ux%u",
329  vfse.discrete.width, vfse.discrete.height);
330  break;
331  case V4L2_FRMSIZE_TYPE_CONTINUOUS:
332  case V4L2_FRMSIZE_TYPE_STEPWISE:
333  av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
334  vfse.stepwise.min_width,
335  vfse.stepwise.max_width,
336  vfse.stepwise.step_width,
337  vfse.stepwise.min_height,
338  vfse.stepwise.max_height,
339  vfse.stepwise.step_height);
340  }
341  vfse.index++;
342  }
343 }
344 #endif
345 
346 static void list_formats(AVFormatContext *ctx, int fd, int type)
347 {
348  struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
349 
350  while(!ioctl(fd, VIDIOC_ENUM_FMT, &vfd)) {
351  enum AVCodecID codec_id = fmt_v4l2codec(vfd.pixelformat);
352  enum AVPixelFormat pix_fmt = fmt_v4l2ff(vfd.pixelformat, codec_id);
353 
354  vfd.index++;
355 
356  if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
357  type & V4L_RAWFORMATS) {
358  const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
359  av_log(ctx, AV_LOG_INFO, "Raw : %9s : %20s :",
360  fmt_name ? fmt_name : "Unsupported",
361  vfd.description);
362  } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
363  type & V4L_COMPFORMATS) {
364  AVCodec *codec = avcodec_find_decoder(codec_id);
365  av_log(ctx, AV_LOG_INFO, "Compressed: %9s : %20s :",
366  codec ? codec->name : "Unsupported",
367  vfd.description);
368  } else {
369  continue;
370  }
371 
372 #ifdef V4L2_FMT_FLAG_EMULATED
373  if (vfd.flags & V4L2_FMT_FLAG_EMULATED) {
374  av_log(ctx, AV_LOG_WARNING, "%s", "Emulated");
375  continue;
376  }
377 #endif
378 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
379  list_framesizes(ctx, fd, vfd.pixelformat);
380 #endif
381  av_log(ctx, AV_LOG_INFO, "\n");
382  }
383 }
384 
385 static void list_standards(AVFormatContext *ctx)
386 {
387  int ret;
388  struct video_data *s = ctx->priv_data;
389  struct v4l2_standard standard;
390 
391  if (s->std_id == 0)
392  return;
393 
394  for (standard.index = 0; ; standard.index++) {
395  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
396  ret = AVERROR(errno);
397  if (ret == AVERROR(EINVAL)) {
398  break;
399  } else {
400  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
401  return;
402  }
403  }
404  av_log(ctx, AV_LOG_INFO, "%2d, %16llx, %s\n",
405  standard.index, standard.id, standard.name);
406  }
407 }
408 
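/*
 * mmap_init(): request up to desired_video_buffers capture buffers with
 * VIDIOC_REQBUFS (the driver may grant fewer; less than 2 is an error), then
 * VIDIOC_QUERYBUF and v4l2_mmap() each buffer into s->buf_start[] with its
 * length in s->buf_len[].  s->buf_dequeued[] tracks which buffers are
 * currently held by the demuxer rather than by the driver.
 */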
409 static int mmap_init(AVFormatContext *ctx)
410 {
411  int i, res;
412  struct video_data *s = ctx->priv_data;
413  struct v4l2_requestbuffers req = {
414  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
415  .count = desired_video_buffers,
416  .memory = V4L2_MEMORY_MMAP
417  };
418 
419  if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
420  res = AVERROR(errno);
421  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
422  return res;
423  }
424 
425  if (req.count < 2) {
426  av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
427  return AVERROR(ENOMEM);
428  }
429  s->buffers = req.count;
430  s->buf_start = av_malloc(sizeof(void *) * s->buffers);
431  if (s->buf_start == NULL) {
432  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
433  return AVERROR(ENOMEM);
434  }
435  s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
436  if (s->buf_len == NULL) {
437  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
438  av_free(s->buf_start);
439  return AVERROR(ENOMEM);
440  }
441  s->buf_dequeued = av_mallocz(sizeof(int) * s->buffers);
442  if (s->buf_dequeued == NULL) {
443  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer array\n");
444  return AVERROR(ENOMEM);
445  }
446 
447  for (i = 0; i < req.count; i++) {
448  struct v4l2_buffer buf = {
449  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
450  .index = i,
451  .memory = V4L2_MEMORY_MMAP
452  };
453  if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
454  res = AVERROR(errno);
455  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
456  return res;
457  }
458 
459  s->buf_len[i] = buf.length;
460  if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
461  av_log(ctx, AV_LOG_ERROR,
462  "buf_len[%d] = %d < expected frame size %d\n",
463  i, s->buf_len[i], s->frame_size);
464  return AVERROR(ENOMEM);
465  }
466  s->buf_start[i] = v4l2_mmap(NULL, buf.length,
467  PROT_READ | PROT_WRITE, MAP_SHARED,
468  s->fd, buf.m.offset);
469 
470  if (s->buf_start[i] == MAP_FAILED) {
471  res = AVERROR(errno);
472  av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
473  return res;
474  }
475  }
476 
477  return 0;
478 }
479 
480 static int enqueue_buffer(int fd, int index)
481 {
482  int res;
483  struct v4l2_buffer buf = { 0 };
484 
485  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
486  buf.memory = V4L2_MEMORY_MMAP;
487  buf.index = index;
488 
489  if (v4l2_ioctl(fd, VIDIOC_QBUF, &buf) < 0) {
490  res = AVERROR(errno);
491  av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
492  return res;
493  }
494  return 0;
495 }
496 
497 static void mmap_release_buffer(AVPacket *pkt)
498 {
499  struct buff_data *buf_descriptor = pkt->priv;
500 
501  if (pkt->data == NULL)
502  return;
503 
504  if (buf_descriptor->index == -1) {
505  av_free(pkt->data);
506  } else {
507  if (!enqueue_buffer(buf_descriptor->fd, buf_descriptor->index))
508  buf_descriptor->buf_dequeued[buf_descriptor->index] = 0;
509  }
510  av_free(buf_descriptor);
511 
512  pkt->data = NULL;
513  pkt->size = 0;
514 }
515 
516 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
517 static int64_t av_gettime_monotonic(void)
518 {
519  struct timespec tv;
520 
521  clock_gettime(CLOCK_MONOTONIC, &tv);
522  return (int64_t)tv.tv_sec * 1000000 + tv.tv_nsec / 1000;
523 }
524 #endif
525 
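/*
 * Timestamp handling: on the first frame init_convert_timestamp() compares
 * the kernel timestamp with the wall clock (abs mode) or with
 * CLOCK_MONOTONIC; when monotonic timestamps are detected (or forced with
 * mono2abs) a timefilter is created and convert_timestamp() then maps every
 * pts onto the wall clock.
 */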
526 static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
527 {
528  struct video_data *s = ctx->priv_data;
529  int64_t now;
530 
531  now = av_gettime();
532  if (s->ts_mode == V4L_TS_ABS &&
533  ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
534  av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
535  s->ts_mode = V4L_TS_CONVERT_READY;
536  return 0;
537  }
538 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
539  now = av_gettime_monotonic();
540  if (s->ts_mode == V4L_TS_MONO2ABS ||
541  (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
542  int64_t period = av_rescale_q(1, AV_TIME_BASE_Q,
543  ctx->streams[0]->avg_frame_rate);
544  av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
545  /* microseconds instead of seconds, MHz instead of Hz */
546  s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
547  s->ts_mode = V4L_TS_CONVERT_READY;
548  return 0;
549  }
550 #endif
551  av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
552  return AVERROR(EIO);
553 }
554 
555 static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
556 {
557  struct video_data *s = ctx->priv_data;
558 
559  if (s->ts_mode) {
560  int r = init_convert_timestamp(ctx, *ts);
561  if (r < 0)
562  return r;
563  }
564 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
565  if (s->timefilter) {
566  int64_t nowa = av_gettime();
567  int64_t nowm = av_gettime_monotonic();
568  ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
569  s->last_time_m = nowm;
570  *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
571  }
572 #endif
573  return 0;
574 }
575 
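/*
 * mmap_read_frame(): dequeue one filled buffer with VIDIOC_DQBUF.  If other
 * buffers are still queued in the driver, the packet points straight at the
 * mmapped memory and the buffer is only re-enqueued when the packet is
 * released; if this was the last free buffer, the frame is copied into a new
 * packet and the buffer is handed back to the driver immediately.
 */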
576 static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
577 {
578  struct video_data *s = ctx->priv_data;
579  struct v4l2_buffer buf = {
580  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
581  .memory = V4L2_MEMORY_MMAP
582  };
583  struct buff_data *buf_descriptor;
584  int res, i, free_buffers;
585 
586  /* FIXME: Some special treatment might be needed in case of loss of signal... */
587  while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
588  if (res < 0) {
589  if (errno == EAGAIN)
590  return AVERROR(EAGAIN);
591  res = AVERROR(errno);
592  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", av_err2str(res));
593  return res;
594  }
595 
596  if (buf.index >= s->buffers) {
597  av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
598  return AVERROR(EINVAL);
599  }
600 
601  /* CPIA is a compressed format and we don't know the exact number of bytes
602  * used by a frame, so set it here as the driver announces it.
603  */
604  if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
605  s->frame_size = buf.bytesused;
606 
607  if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
608  av_log(ctx, AV_LOG_ERROR,
609  "The v4l2 frame is %d bytes, but %d bytes are expected\n",
610  buf.bytesused, s->frame_size);
611  enqueue_buffer(s->fd, buf.index);
612  return AVERROR_INVALIDDATA;
613  }
614 
615  buf_descriptor = av_malloc(sizeof(struct buff_data));
616  if (buf_descriptor == NULL) {
617  /* Something went wrong... Since av_malloc() failed, we cannot even
618  * allocate a buffer for memcopying into it
619  */
620  av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
621  res = v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf);
622  return AVERROR(ENOMEM);
623  }
624  buf_descriptor->fd = s->fd;
625  buf_descriptor->buf_dequeued = s->buf_dequeued;
626 
627  free_buffers = -1; /* start from -1 because we just dequeued a buffer */
628  for (i = 0; i < s->buffers; i++)
629  if (s->buf_dequeued[i] == 0)
630  free_buffers++;
631 
632  if (free_buffers == 0) {
633  if ((res = av_new_packet(pkt, buf.bytesused)) < 0) {
634  enqueue_buffer(s->fd, buf.index);
635  return res;
636  }
637  memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);
638  enqueue_buffer(s->fd, buf.index);
639  buf_descriptor->index = -1;
640  } else {
641  /* Image is at s->buff_start[buf.index] */
642  pkt->data = s->buf_start[buf.index];
643  buf_descriptor->index = buf.index;
644  buf_descriptor->buf_dequeued[buf.index] = 1;
645  }
646  pkt->size = buf.bytesused;
647  pkt->priv = buf_descriptor;
648  pkt->destruct = mmap_release_buffer;
649  pkt->pts = buf.timestamp.tv_sec * INT64_C(1000000) + buf.timestamp.tv_usec;
650  res = convert_timestamp(ctx, &pkt->pts);
651  if (res < 0) {
652  mmap_release_buffer(pkt);
653  return res;
654  }
655 
656  return s->buf_len[buf.index];
657 }
658 
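/*
 * mmap_start(): queue every mapped buffer with VIDIOC_QBUF and start
 * capturing with VIDIOC_STREAMON.
 */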
659 static int mmap_start(AVFormatContext *ctx)
660 {
661  struct video_data *s = ctx->priv_data;
662  enum v4l2_buf_type type;
663  int i, res;
664 
665  for (i = 0; i < s->buffers; i++) {
666  struct v4l2_buffer buf = {
667  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
668  .index = i,
669  .memory = V4L2_MEMORY_MMAP
670  };
671 
672  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
673  res = AVERROR(errno);
674  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
675  return res;
676  }
677  }
678 
679  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
680  if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
681  res = AVERROR(errno);
682  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", av_err2str(res));
683  return res;
684  }
685 
686  return 0;
687 }
688 
689 static void mmap_close(struct video_data *s)
690 {
691  enum v4l2_buf_type type;
692  int i;
693 
694  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
695  /* We do not check for the result, because we could
696  * not do anything about it anyway...
697  */
698  v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
699  for (i = 0; i < s->buffers; i++) {
700  v4l2_munmap(s->buf_start[i], s->buf_len[i]);
701  }
702  av_free(s->buf_start);
703  av_free(s->buf_len);
704  av_free(s->buf_dequeued);
705 }
706 
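/*
 * v4l2_set_parameters(): select the TV standard requested through the
 * "standard" option (VIDIOC_ENUMSTD / VIDIOC_S_STD), read the current
 * standard back to learn its frame period and, when a "framerate" option was
 * given and the driver advertises V4L2_CAP_TIMEPERFRAME, program the time per
 * frame with VIDIOC_S_PARM.  The resulting rate ends up in
 * streams[0]->avg_frame_rate.
 */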
707 static int v4l2_set_parameters(AVFormatContext *s1)
708 {
709  struct video_data *s = s1->priv_data;
710  struct v4l2_standard standard = { 0 };
711  struct v4l2_streamparm streamparm = { 0 };
712  struct v4l2_fract *tpf;
713  AVRational framerate_q = { 0 };
714  int i, ret;
715 
716  if (s->framerate &&
717  (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
718  av_log(s1, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
719  s->framerate);
720  return ret;
721  }
722 
723  if (s->standard) {
724  if (s->std_id) {
725  ret = 0;
726  av_log(s1, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
727  /* set tv standard */
728  for (i = 0; ; i++) {
729  standard.index = i;
730  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
731  ret = AVERROR(errno);
732  break;
733  }
734  if (!av_strcasecmp(standard.name, s->standard))
735  break;
736  }
737  if (ret < 0) {
738  av_log(s1, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
739  return ret;
740  }
741 
742  if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
743  ret = AVERROR(errno);
744  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
745  return ret;
746  }
747  } else {
748  av_log(s1, AV_LOG_WARNING,
749  "This device does not support any standard\n");
750  }
751  }
752 
753  /* get standard */
754  if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
755  tpf = &standard.frameperiod;
756  for (i = 0; ; i++) {
757  standard.index = i;
758  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
759  ret = AVERROR(errno);
760  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
761  return ret;
762  }
763  if (standard.id == s->std_id) {
764  av_log(s1, AV_LOG_DEBUG,
765  "Current standard: %s, id: %"PRIu64", frameperiod: %d/%d\n",
766  standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
767  break;
768  }
769  }
770  } else {
771  tpf = &streamparm.parm.capture.timeperframe;
772  }
773 
774  streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
775  if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
776  ret = AVERROR(errno);
777  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
778  return ret;
779  }
780 
781  if (framerate_q.num && framerate_q.den) {
782  if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
783  tpf = &streamparm.parm.capture.timeperframe;
784 
785  av_log(s1, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
786  framerate_q.den, framerate_q.num);
787  tpf->numerator = framerate_q.den;
788  tpf->denominator = framerate_q.num;
789 
790  if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
791  ret = AVERROR(errno);
792  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n", av_err2str(ret));
793  return ret;
794  }
795 
796  if (framerate_q.num != tpf->denominator ||
797  framerate_q.den != tpf->numerator) {
798  av_log(s1, AV_LOG_INFO,
799  "The driver changed the time per frame from "
800  "%d/%d to %d/%d\n",
801  framerate_q.den, framerate_q.num,
802  tpf->numerator, tpf->denominator);
803  }
804  } else {
805  av_log(s1, AV_LOG_WARNING,
806  "The driver does not allow to change time per frame\n");
807  }
808  }
809  s1->streams[0]->avg_frame_rate.num = tpf->denominator;
810  s1->streams[0]->avg_frame_rate.den = tpf->numerator;
811  s1->streams[0]->r_frame_rate = s1->streams[0]->avg_frame_rate;
812 
813  return 0;
814 }
815 
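/*
 * device_try_init(): first try the V4L2 format matching the user-requested
 * pixel format or codec; if the driver rejects it with EINVAL, walk
 * fmt_conversion_table and settle for the first format the driver accepts.
 * On success *desired_format and *codec_id describe the negotiated format.
 */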
816 static int device_try_init(AVFormatContext *s1,
817  enum AVPixelFormat pix_fmt,
818  int *width,
819  int *height,
820  uint32_t *desired_format,
821  enum AVCodecID *codec_id)
822 {
823  int ret, i;
824 
825  *desired_format = fmt_ff2v4l(pix_fmt, s1->video_codec_id);
826 
827  if (*desired_format) {
828  ret = device_init(s1, width, height, *desired_format);
829  if (ret < 0) {
830  *desired_format = 0;
831  if (ret != AVERROR(EINVAL))
832  return ret;
833  }
834  }
835 
836  if (!*desired_format) {
837  for (i = 0; i<FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
838  if (s1->video_codec_id == AV_CODEC_ID_NONE ||
839  fmt_conversion_table[i].codec_id == s1->video_codec_id) {
840  av_log(s1, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
841  avcodec_get_name(fmt_conversion_table[i].codec_id),
842  (char *)av_x_if_null(av_get_pix_fmt_name(fmt_conversion_table[i].ff_fmt), "none"));
843 
844  *desired_format = fmt_conversion_table[i].v4l2_fmt;
845  ret = device_init(s1, width, height, *desired_format);
846  if (ret >= 0)
847  break;
848  else if (ret != AVERROR(EINVAL))
849  return ret;
850  *desired_format = 0;
851  }
852  }
853 
854  if (*desired_format == 0) {
855  av_log(s1, AV_LOG_ERROR, "Cannot find a proper format for "
856  "codec '%s' (id %d), pixel format '%s' (id %d)\n",
857  avcodec_get_name(s1->video_codec_id), s1->video_codec_id,
858  (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
859  ret = AVERROR(EINVAL);
860  }
861  }
862 
863  *codec_id = fmt_v4l2codec(*desired_format);
864  av_assert0(*codec_id != AV_CODEC_ID_NONE);
865  return ret;
866 }
867 
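/*
 * v4l2_read_header(): open the device, select the input channel, optionally
 * just list formats or standards and exit, negotiate frame size and pixel
 * format, set the frame rate, map and queue the buffers, start streaming and
 * fill in the AVStream codec parameters.
 */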
868 static int v4l2_read_header(AVFormatContext *s1)
869 {
870  struct video_data *s = s1->priv_data;
871  AVStream *st;
872  int res = 0;
873  uint32_t desired_format;
874  enum AVCodecID codec_id = AV_CODEC_ID_NONE;
875  enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
876  struct v4l2_input input = { 0 };
877 
878  st = avformat_new_stream(s1, NULL);
879  if (!st)
880  return AVERROR(ENOMEM);
881 
882  s->fd = device_open(s1);
883  if (s->fd < 0)
884  return s->fd;
885 
886  /* set tv video input */
887  av_log(s1, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
888  if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
889  res = AVERROR(errno);
890  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
891  return res;
892  }
893 
894  input.index = s->channel;
895  if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
896  res = AVERROR(errno);
897  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
898  return res;
899  }
900  s->std_id = input.std;
901  av_log(s1, AV_LOG_DEBUG, "input_channel: %d, input_name: %s\n",
902  s->channel, input.name);
903 
904  if (s->list_format) {
905  list_formats(s1, s->fd, s->list_format);
906  return AVERROR_EXIT;
907  }
908 
909  if (s->list_standard) {
910  list_standards(s1);
911  return AVERROR_EXIT;
912  }
913 
914  avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
915 
916  if (s->pixel_format) {
917  AVCodec *codec = avcodec_find_decoder_by_name(s->pixel_format);
918 
919  if (codec)
920  s1->video_codec_id = codec->id;
921 
922  pix_fmt = av_get_pix_fmt(s->pixel_format);
923 
924  if (pix_fmt == AV_PIX_FMT_NONE && !codec) {
925  av_log(s1, AV_LOG_ERROR, "No such input format: %s.\n",
926  s->pixel_format);
927 
928  return AVERROR(EINVAL);
929  }
930  }
931 
932  if (!s->width && !s->height) {
933  struct v4l2_format fmt;
934 
935  av_log(s1, AV_LOG_VERBOSE,
936  "Querying the device for the current frame size\n");
937  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
938  if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
939  res = AVERROR(errno);
940  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", av_err2str(res));
941  return res;
942  }
943 
944  s->width = fmt.fmt.pix.width;
945  s->height = fmt.fmt.pix.height;
946  av_log(s1, AV_LOG_VERBOSE,
947  "Setting frame size to %dx%d\n", s->width, s->height);
948  }
949 
950  res = device_try_init(s1, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
951  if (res < 0) {
952  v4l2_close(s->fd);
953  return res;
954  }
955 
956  /* If no pixel_format was specified, the codec_id was not known up
957  * until now. Set video_codec_id in the context, as codec_id will
958  * not be available outside this function
959  */
960  if (codec_id != AV_CODEC_ID_NONE && s1->video_codec_id == AV_CODEC_ID_NONE)
961  s1->video_codec_id = codec_id;
962 
963  if ((res = av_image_check_size(s->width, s->height, 0, s1)) < 0)
964  return res;
965 
966  s->frame_format = desired_format;
967 
968  if ((res = v4l2_set_parameters(s1)) < 0)
969  return res;
970 
971  st->codec->pix_fmt = fmt_v4l2ff(desired_format, codec_id);
972  s->frame_size =
973  avpicture_get_size(st->codec->pix_fmt, s->width, s->height);
974 
975  if ((res = mmap_init(s1)) ||
976  (res = mmap_start(s1)) < 0) {
977  v4l2_close(s->fd);
978  return res;
979  }
980 
981  s->top_field_first = first_field(s->fd);
982  s->top_field_first = first_field(s->fd);
983  st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
984  st->codec->codec_id = codec_id;
985  if (codec_id == AV_CODEC_ID_RAWVIDEO)
986  st->codec->codec_tag =
987  avcodec_pix_fmt_to_codec_tag(st->codec->pix_fmt);
988  if (desired_format == V4L2_PIX_FMT_YVU420)
989  st->codec->codec_tag = MKTAG('Y', 'V', '1', '2');
990  else if (desired_format == V4L2_PIX_FMT_YVU410)
991  st->codec->codec_tag = MKTAG('Y', 'V', 'U', '9');
992  st->codec->width = s->width;
993  st->codec->height = s->height;
994  st->codec->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;
995 
996  return 0;
997 }
998 
999 static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
1000 {
1001  struct video_data *s = s1->priv_data;
1002  AVFrame *frame = s1->streams[0]->codec->coded_frame;
1003  int res;
1004 
1005  av_init_packet(pkt);
1006  pkt->data = NULL;
1007  pkt->size = 0;
1008  if ((res = mmap_read_frame(s1, pkt)) < 0) {
1009  return res;
1010  }
1011 
1012  if (frame && s->interlaced) {
1013  frame->interlaced_frame = 1;
1014  frame->top_field_first = s->top_field_first;
1015  }
1016 
1017  return pkt->size;
1018 }
1019 
1020 static int v4l2_read_close(AVFormatContext *s1)
1021 {
1022  struct video_data *s = s1->priv_data;
1023 
1024  mmap_close(s);
1025 
1026  v4l2_close(s->fd);
1027  return 0;
1028 }
1029 
1030 #define OFFSET(x) offsetof(struct video_data, x)
1031 #define DEC AV_OPT_FLAG_DECODING_PARAM
1032 
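/*
 * Private options of the demuxer.  Typical invocations (hedged examples;
 * the device path and parameter values are placeholders):
 *
 *     ffmpeg -f v4l2 -list_formats all -i /dev/video0
 *     ffmpeg -f v4l2 -video_size 640x480 -framerate 30 \
 *            -input_format yuyv422 -i /dev/video0 out.mkv
 */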
1033 static const AVOption options[] = {
1034  { "standard", "set TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC },
1035  { "channel", "set TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC },
1036  { "video_size", "set frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC },
1037  { "pixel_format", "set preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1038  { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1039  { "framerate", "set frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1040 
1041  { "list_formats", "list available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, "list_formats" },
1042  { "all", "show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1043  { "raw", "show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1044  { "compressed", "show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1045 
1046  { "list_standards", "list supported standards and exit", OFFSET(list_standard), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, DEC, "list_standards" },
1047  { "all", "show all supported standards", OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, "list_standards" },
1048 
1049  { "timestamps", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" },
1050  { "ts", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" },
1051  { "default", "use timestamps from the kernel", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_DEFAULT }, 0, 2, DEC, "timestamps" },
1052  { "abs", "use absolute timestamps (wall clock)", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_ABS }, 0, 2, DEC, "timestamps" },
1053  { "mono2abs", "force conversion from monotonic to absolute timestamps", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" },
1054 
1055  { NULL },
1056 };
1057 
1058 static const AVClass v4l2_class = {
1059  .class_name = "V4L2 indev",
1060  .item_name = av_default_item_name,
1061  .option = options,
1062  .version = LIBAVUTIL_VERSION_INT,
1063 };
1064 
1065 AVInputFormat ff_v4l2_demuxer = {
1066  .name = "video4linux2,v4l2",
1067  .long_name = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
1068  .priv_data_size = sizeof(struct video_data),
1069  .read_header = v4l2_read_header,
1070  .read_packet = v4l2_read_packet,
1071  .read_close = v4l2_read_close,
1072  .flags = AVFMT_NOFILE,
1073  .priv_class = &v4l2_class,
1074 };
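/*
 * A minimal, hedged sketch of how an application would open this device
 * demuxer through libavformat (device path and option values are examples
 * only):
 *
 *     #include <libavdevice/avdevice.h>
 *     #include <libavformat/avformat.h>
 *
 *     AVFormatContext *fmt_ctx = NULL;
 *     AVDictionary *opts = NULL;
 *     AVInputFormat *ifmt;
 *
 *     av_register_all();
 *     avdevice_register_all();
 *     ifmt = av_find_input_format("video4linux2");
 *     av_dict_set(&opts, "video_size", "640x480", 0);
 *     av_dict_set(&opts, "framerate", "30", 0);
 *     if (avformat_open_input(&fmt_ctx, "/dev/video0", ifmt, &opts) < 0) {
 *         // handle the error
 *     }
 *     av_dict_free(&opts);
 */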