FFmpeg
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
v4l2_buffers.c
Go to the documentation of this file.
1 /*
2  * V4L2 buffer helper functions.
3  *
4  * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
5  * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include <linux/videodev2.h>
25 #include <sys/ioctl.h>
26 #include <sys/mman.h>
27 #include <unistd.h>
28 #include <fcntl.h>
29 #include <poll.h>
30 #include "libavcodec/avcodec.h"
31 #include "libavcodec/internal.h"
32 #include "v4l2_context.h"
33 #include "v4l2_buffers.h"
34 #include "v4l2_m2m.h"
35 
36 #define USEC_PER_SEC 1000000
37 
39 {
40  return V4L2_TYPE_IS_OUTPUT(buf->context->type) ?
41  container_of(buf->context, V4L2m2mContext, output) :
42  container_of(buf->context, V4L2m2mContext, capture);
43 }
44 
46 {
47  return buf_to_m2mctx(buf)->avctx;
48 }
49 
50 static inline void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
51 {
53  AVRational v4l2_timebase = { 1, USEC_PER_SEC };
54  int64_t v4l2_pts;
55 
56  if (pts == AV_NOPTS_VALUE)
57  pts = 0;
58 
59  /* convert pts to v4l2 timebase */
60  v4l2_pts = av_rescale_q(pts, s->avctx->time_base, v4l2_timebase);
61  out->buf.timestamp.tv_usec = v4l2_pts % USEC_PER_SEC;
62  out->buf.timestamp.tv_sec = v4l2_pts / USEC_PER_SEC;
63 }
64 
65 static inline uint64_t v4l2_get_pts(V4L2Buffer *avbuf)
66 {
67  V4L2m2mContext *s = buf_to_m2mctx(avbuf);
68  AVRational v4l2_timebase = { 1, USEC_PER_SEC };
69  int64_t v4l2_pts;
70 
71  /* convert pts back to encoder timebase */
72  v4l2_pts = avbuf->buf.timestamp.tv_sec * USEC_PER_SEC + avbuf->buf.timestamp.tv_usec;
73 
74  return av_rescale_q(v4l2_pts, v4l2_timebase, s->avctx->time_base);
75 }
76 
78 {
79  enum v4l2_ycbcr_encoding ycbcr;
80  enum v4l2_colorspace cs;
81 
82  cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
83  buf->context->format.fmt.pix_mp.colorspace :
84  buf->context->format.fmt.pix.colorspace;
85 
86  ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
87  buf->context->format.fmt.pix_mp.ycbcr_enc:
88  buf->context->format.fmt.pix.ycbcr_enc;
89 
90  switch(ycbcr) {
91  case V4L2_YCBCR_ENC_XV709:
92  case V4L2_YCBCR_ENC_709: return AVCOL_PRI_BT709;
93  case V4L2_YCBCR_ENC_XV601:
94  case V4L2_YCBCR_ENC_601:return AVCOL_PRI_BT470M;
95  default:
96  break;
97  }
98 
99  switch(cs) {
100  case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_PRI_BT470BG;
101  case V4L2_COLORSPACE_SMPTE170M: return AVCOL_PRI_SMPTE170M;
102  case V4L2_COLORSPACE_SMPTE240M: return AVCOL_PRI_SMPTE240M;
103  case V4L2_COLORSPACE_BT2020: return AVCOL_PRI_BT2020;
104  default:
105  break;
106  }
107 
108  return AVCOL_PRI_UNSPECIFIED;
109 }
110 
112 {
113  enum v4l2_quantization qt;
114 
115  qt = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
116  buf->context->format.fmt.pix_mp.quantization :
117  buf->context->format.fmt.pix.quantization;
118 
119  switch (qt) {
120  case V4L2_QUANTIZATION_LIM_RANGE: return AVCOL_RANGE_MPEG;
121  case V4L2_QUANTIZATION_FULL_RANGE: return AVCOL_RANGE_JPEG;
122  default:
123  break;
124  }
125 
127 }
128 
130 {
131  enum v4l2_ycbcr_encoding ycbcr;
132  enum v4l2_colorspace cs;
133 
134  cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
135  buf->context->format.fmt.pix_mp.colorspace :
136  buf->context->format.fmt.pix.colorspace;
137 
138  ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
139  buf->context->format.fmt.pix_mp.ycbcr_enc:
140  buf->context->format.fmt.pix.ycbcr_enc;
141 
142  switch(cs) {
143  case V4L2_COLORSPACE_SRGB: return AVCOL_SPC_RGB;
144  case V4L2_COLORSPACE_REC709: return AVCOL_SPC_BT709;
145  case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_SPC_FCC;
146  case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_SPC_BT470BG;
147  case V4L2_COLORSPACE_SMPTE170M: return AVCOL_SPC_SMPTE170M;
148  case V4L2_COLORSPACE_SMPTE240M: return AVCOL_SPC_SMPTE240M;
149  case V4L2_COLORSPACE_BT2020:
150  if (ycbcr == V4L2_YCBCR_ENC_BT2020_CONST_LUM)
151  return AVCOL_SPC_BT2020_CL;
152  else
153  return AVCOL_SPC_BT2020_NCL;
154  default:
155  break;
156  }
157 
158  return AVCOL_SPC_UNSPECIFIED;
159 }
160 
162 {
163  enum v4l2_ycbcr_encoding ycbcr;
164  enum v4l2_xfer_func xfer;
165  enum v4l2_colorspace cs;
166 
167  cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
168  buf->context->format.fmt.pix_mp.colorspace :
169  buf->context->format.fmt.pix.colorspace;
170 
171  ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
172  buf->context->format.fmt.pix_mp.ycbcr_enc:
173  buf->context->format.fmt.pix.ycbcr_enc;
174 
175  xfer = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
176  buf->context->format.fmt.pix_mp.xfer_func:
177  buf->context->format.fmt.pix.xfer_func;
178 
179  switch (xfer) {
180  case V4L2_XFER_FUNC_709: return AVCOL_TRC_BT709;
181  case V4L2_XFER_FUNC_SRGB: return AVCOL_TRC_IEC61966_2_1;
182  default:
183  break;
184  }
185 
186  switch (cs) {
187  case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_TRC_GAMMA22;
188  case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_TRC_GAMMA28;
189  case V4L2_COLORSPACE_SMPTE170M: return AVCOL_TRC_SMPTE170M;
190  case V4L2_COLORSPACE_SMPTE240M: return AVCOL_TRC_SMPTE240M;
191  default:
192  break;
193  }
194 
195  switch (ycbcr) {
196  case V4L2_YCBCR_ENC_XV709:
197  case V4L2_YCBCR_ENC_XV601: return AVCOL_TRC_BT1361_ECG;
198  default:
199  break;
200  }
201 
202  return AVCOL_TRC_UNSPECIFIED;
203 }
204 
205 static void v4l2_free_buffer(void *opaque, uint8_t *unused)
206 {
207  V4L2Buffer* avbuf = opaque;
208  V4L2m2mContext *s = buf_to_m2mctx(avbuf);
209 
210  atomic_fetch_sub_explicit(&s->refcount, 1, memory_order_acq_rel);
211  if (s->reinit) {
212  if (!atomic_load(&s->refcount))
213  sem_post(&s->refsync);
214  return;
215  }
216 
217  if (avbuf->context->streamon) {
218  ff_v4l2_buffer_enqueue(avbuf);
219  return;
220  }
221 
222  if (!atomic_load(&s->refcount))
224 }
225 
227 {
229 
230  if (plane >= in->num_planes)
231  return AVERROR(EINVAL);
232 
233  /* even though most encoders return 0 in data_offset encoding vp8 does require this value */
234  *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
235  in->plane_info[plane].length, v4l2_free_buffer, in, 0);
236  if (!*buf)
237  return AVERROR(ENOMEM);
238 
239  in->status = V4L2BUF_RET_USER;
240  atomic_fetch_add_explicit(&s->refcount, 1, memory_order_relaxed);
241 
242  return 0;
243 }
244 
245 static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t* data, int size, AVBufferRef* bref)
246 {
247  unsigned int bytesused, length;
248 
249  if (plane >= out->num_planes)
250  return AVERROR(EINVAL);
251 
252  bytesused = FFMIN(size, out->plane_info[plane].length);
253  length = out->plane_info[plane].length;
254 
255  memcpy(out->plane_info[plane].mm_addr, data, FFMIN(size, out->plane_info[plane].length));
256 
257  if (V4L2_TYPE_IS_MULTIPLANAR(out->buf.type)) {
258  out->planes[plane].bytesused = bytesused;
259  out->planes[plane].length = length;
260  } else {
261  out->buf.bytesused = bytesused;
262  out->buf.length = length;
263  }
264 
265  return 0;
266 }
267 
268 /******************************************************************************
269  *
270  * V4L2Buffer interface
271  *
272  ******************************************************************************/
273 
275 {
276  int i, ret;
277 
278  for(i = 0; i < out->num_planes; i++) {
279  ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, frame->buf[i]);
280  if (ret)
281  return ret;
282  }
283 
284  v4l2_set_pts(out, frame->pts);
285 
286  return 0;
287 }
288 
290 {
291  V4L2m2mContext *s = buf_to_m2mctx(avbuf);
292  int i, ret;
293 
294  av_frame_unref(frame);
295 
296  /* 1. get references to the actual data */
297  for (i = 0; i < avbuf->num_planes; i++) {
298  ret = v4l2_buf_to_bufref(avbuf, i, &frame->buf[i]);
299  if (ret)
300  return ret;
301 
302  frame->linesize[i] = avbuf->plane_info[i].bytesperline;
303  frame->data[i] = frame->buf[i]->data;
304  }
305 
306  /* 1.1 fixup special cases */
307  switch (avbuf->context->av_pix_fmt) {
308  case AV_PIX_FMT_NV12:
309  if (avbuf->num_planes > 1)
310  break;
311  frame->linesize[1] = avbuf->plane_info[0].bytesperline;
312  frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
313  break;
314  default:
315  break;
316  }
317 
318  /* 2. get frame information */
319  frame->key_frame = !!(avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME);
320  frame->format = avbuf->context->av_pix_fmt;
322  frame->colorspace = v4l2_get_color_space(avbuf);
323  frame->color_range = v4l2_get_color_range(avbuf);
324  frame->color_trc = v4l2_get_color_trc(avbuf);
325  frame->pts = v4l2_get_pts(avbuf);
326 
327  /* these two values are updated also during re-init in v4l2_process_driver_event */
328  frame->height = s->output.height;
329  frame->width = s->output.width;
330 
331  /* 3. report errors upstream */
332  if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
333  av_log(logger(avbuf), AV_LOG_ERROR, "%s: driver decode error\n", avbuf->context->name);
335  }
336 
337  return 0;
338 }
339 
341 {
342  int ret;
343 
344  av_packet_unref(pkt);
345  ret = v4l2_buf_to_bufref(avbuf, 0, &pkt->buf);
346  if (ret)
347  return ret;
348 
349  pkt->size = V4L2_TYPE_IS_MULTIPLANAR(avbuf->buf.type) ? avbuf->buf.m.planes[0].bytesused : avbuf->buf.bytesused;
350  pkt->data = pkt->buf->data;
351 
352  if (avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME)
353  pkt->flags |= AV_PKT_FLAG_KEY;
354 
355  if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
356  av_log(logger(avbuf), AV_LOG_ERROR, "%s driver encode error\n", avbuf->context->name);
357  pkt->flags |= AV_PKT_FLAG_CORRUPT;
358  }
359 
360  pkt->dts = pkt->pts = v4l2_get_pts(avbuf);
361 
362  return 0;
363 }
364 
366 {
367  int ret;
368 
369  ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, pkt->buf);
370  if (ret)
371  return ret;
372 
373  v4l2_set_pts(out, pkt->pts);
374 
375  if (pkt->flags & AV_PKT_FLAG_KEY)
376  out->flags = V4L2_BUF_FLAG_KEYFRAME;
377 
378  return 0;
379 }
380 
382 {
383  V4L2Context *ctx = avbuf->context;
384  int ret, i;
385 
386  avbuf->buf.memory = V4L2_MEMORY_MMAP;
387  avbuf->buf.type = ctx->type;
388  avbuf->buf.index = index;
389 
390  if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
391  avbuf->buf.length = VIDEO_MAX_PLANES;
392  avbuf->buf.m.planes = avbuf->planes;
393  }
394 
395  ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QUERYBUF, &avbuf->buf);
396  if (ret < 0)
397  return AVERROR(errno);
398 
399  if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
400  avbuf->num_planes = 0;
401  for (;;) {
402  /* in MP, the V4L2 API states that buf.length means num_planes */
403  if (avbuf->num_planes >= avbuf->buf.length)
404  break;
405  if (avbuf->buf.m.planes[avbuf->num_planes].length)
406  avbuf->num_planes++;
407  }
408  } else
409  avbuf->num_planes = 1;
410 
411  for (i = 0; i < avbuf->num_planes; i++) {
412 
413  avbuf->plane_info[i].bytesperline = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
414  ctx->format.fmt.pix_mp.plane_fmt[i].bytesperline :
415  ctx->format.fmt.pix.bytesperline;
416 
417  if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
418  avbuf->plane_info[i].length = avbuf->buf.m.planes[i].length;
419  avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.m.planes[i].length,
420  PROT_READ | PROT_WRITE, MAP_SHARED,
421  buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.planes[i].m.mem_offset);
422  } else {
423  avbuf->plane_info[i].length = avbuf->buf.length;
424  avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.length,
425  PROT_READ | PROT_WRITE, MAP_SHARED,
426  buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.offset);
427  }
428 
429  if (avbuf->plane_info[i].mm_addr == MAP_FAILED)
430  return AVERROR(ENOMEM);
431  }
432 
433  avbuf->status = V4L2BUF_AVAILABLE;
434 
435  if (V4L2_TYPE_IS_OUTPUT(ctx->type))
436  return 0;
437 
438  if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
439  avbuf->buf.m.planes = avbuf->planes;
440  avbuf->buf.length = avbuf->num_planes;
441 
442  } else {
443  avbuf->buf.bytesused = avbuf->planes[0].bytesused;
444  avbuf->buf.length = avbuf->planes[0].length;
445  }
446 
447  return ff_v4l2_buffer_enqueue(avbuf);
448 }
449 
451 {
452  int ret;
453 
454  avbuf->buf.flags = avbuf->flags;
455 
456  ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QBUF, &avbuf->buf);
457  if (ret < 0)
458  return AVERROR(errno);
459 
460  avbuf->status = V4L2BUF_IN_DRIVER;
461 
462  return 0;
463 }
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
Definition: pixfmt.h:486
int plane
Definition: avisynth_c.h:422
#define NULL
Definition: coverity.c:32
const char * s
Definition: avisynth_c.h:768
static AVCodecContext * logger(V4L2Buffer *buf)
Definition: v4l2_buffers.c:45
This structure describes decoded (raw) audio or video data.
Definition: frame.h:201
const char * name
context name.
Definition: v4l2_context.h:40
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:101
AVCodecContext * avctx
Definition: v4l2_m2m.h:57
static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t *data, int size, AVBufferRef *bref)
Definition: v4l2_buffers.c:245
static enum AVColorTransferCharacteristic v4l2_get_color_trc(V4L2Buffer *buf)
Definition: v4l2_buffers.c:161
int ff_v4l2_buffer_buf_to_avpkt(AVPacket *pkt, V4L2Buffer *avbuf)
Extracts the data from a V4L2Buffer to an AVPacket.
Definition: v4l2_buffers.c:340
int ff_v4l2_buffer_initialize(V4L2Buffer *avbuf, int index)
Initializes a V4L2Buffer.
Definition: v4l2_buffers.c:381
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:393
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 ...
Definition: pixfmt.h:490
int size
Definition: avcodec.h:1680
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:491
int width
Width and height of the frames it produces (in case of a capture context, e.g.
Definition: v4l2_context.h:71
int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
Extracts the data from an AVFrame to a V4L2Buffer.
Definition: v4l2_buffers.c:274
static AVPacket pkt
enum V4L2Buffer_status status
Definition: v4l2_buffers.h:58
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB)
Definition: pixfmt.h:485
static enum AVColorSpace v4l2_get_color_space(V4L2Buffer *buf)
Definition: v4l2_buffers.c:129
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:455
functionally identical to above
Definition: pixfmt.h:492
AVRational time_base
This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented...
Definition: avcodec.h:1898
#define USEC_PER_SEC
Definition: v4l2_buffers.c:36
static uint64_t v4l2_get_pts(V4L2Buffer *avbuf)
Definition: v4l2_buffers.c:65
uint8_t
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:484
static enum AVColorPrimaries v4l2_get_color_primaries(V4L2Buffer *buf)
Definition: v4l2_buffers.c:77
also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:460
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:294
struct V4L2Buffer::V4L2Plane_info plane_info[VIDEO_MAX_PLANES]
static AVFrame * frame
int ff_v4l2_buffer_enqueue(V4L2Buffer *avbuf)
Enqueues a V4L2Buffer.
Definition: v4l2_buffers.c:450
uint8_t * data
Definition: avcodec.h:1679
AVColorRange
MPEG vs JPEG YUV range.
Definition: pixfmt.h:507
ptrdiff_t size
Definition: opengl_enc.c:101
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:431
#define sem_post(psem)
Definition: semaphore.h:26
#define av_log(a,...)
struct V4L2Context * context
Definition: v4l2_buffers.h:42
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: avcodec.h:1711
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
static enum AVColorRange v4l2_get_color_range(V4L2Buffer *buf)
Definition: v4l2_buffers.c:111
also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:436
int width
Definition: frame.h:259
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
ITU-R BT1361 Extended Colour Gamut.
Definition: pixfmt.h:468
#define atomic_load(object)
Definition: stdatomic.h:93
#define AVERROR(e)
Definition: error.h:43
struct v4l2_buffer buf
Definition: v4l2_buffers.h:54
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:446
int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
Extracts the data from a V4L2Buffer to an AVFrame.
Definition: v4l2_buffers.c:289
AVBufferRef * buf
A reference to the reference-counted buffer where the packet data is stored.
Definition: avcodec.h:1662
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:457
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:90
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B
Definition: pixfmt.h:433
GLsizei GLsizei * length
Definition: opengl_enc.c:115
AVBufferRef * av_buffer_create(uint8_t *data, int size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:28
enum AVPixelFormat av_pix_fmt
AVPixelFormat corresponding to this buffer context.
Definition: v4l2_context.h:53
int ff_v4l2_m2m_codec_end(AVCodecContext *avctx)
Releases all the codec resources if all AVBufferRefs have been returned to the ctx.
Definition: v4l2_m2m.c:314
int flags
A combination of AV_PKT_FLAG values.
Definition: avcodec.h:1685
int streamon
Whether the stream has been started (VIDIOC_STREAMON has been sent).
Definition: v4l2_context.h:86
#define FFMIN(a, b)
Definition: common.h:96
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:495
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:438
AVFormatContext * ctx
Definition: movenc.c:48
struct v4l2_plane planes[VIDEO_MAX_PLANES]
Definition: v4l2_buffers.h:55
FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
Definition: pixfmt.h:489
the normal 2^n-1 "JPEG" YUV ranges
Definition: pixfmt.h:510
struct v4l2_format format
Format returned by the driver after initializing the buffer context.
Definition: v4l2_context.h:65
#define atomic_fetch_add_explicit(object, operand, order)
Definition: stdatomic.h:149
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:274
also ITU-R BT1361
Definition: pixfmt.h:457
also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
Definition: pixfmt.h:462
Libavcodec external API header.
#define atomic_fetch_sub_explicit(object, operand, order)
Definition: stdatomic.h:152
functionally identical to above
Definition: pixfmt.h:440
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:232
atomic_uint refcount
Definition: v4l2_m2m.h:54
main external API structure.
Definition: avcodec.h:1761
void av_packet_unref(AVPacket *pkt)
Wipe the packet.
Definition: avpacket.c:618
uint8_t * data
The data buffer.
Definition: buffer.h:89
V4L2Buffer (wrapper for v4l2_buffer management)
Definition: v4l2_buffers.h:40
static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
Definition: v4l2_buffers.c:226
uint8_t pi<< 24) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_U8, uint8_t,(*(constuint8_t *) pi-0x80)*(1.0f/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_U8, uint8_t,(*(constuint8_t *) pi-0x80)*(1.0/(1<< 7))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S16, int16_t,(*(constint16_t *) pi >>8)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S16, int16_t,*(constint16_t *) pi *(1.0f/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S16, int16_t,*(constint16_t *) pi *(1.0/(1<< 15))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_S32, int32_t,(*(constint32_t *) pi >>24)+0x80) CONV_FUNC_GROUP(AV_SAMPLE_FMT_FLT, float, AV_SAMPLE_FMT_S32, int32_t,*(constint32_t *) pi *(1.0f/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_DBL, double, AV_SAMPLE_FMT_S32, int32_t,*(constint32_t *) pi *(1.0/(1U<< 31))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_FLT, float, av_clip_uint8(lrintf(*(constfloat *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, float, av_clip_int16(lrintf(*(constfloat *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, float, av_clipl_int32(llrintf(*(constfloat *) pi *(1U<< 31)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_U8, uint8_t, AV_SAMPLE_FMT_DBL, double, av_clip_uint8(lrint(*(constdouble *) pi *(1<< 7))+0x80)) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_DBL, double, av_clip_int16(lrint(*(constdouble *) pi *(1<< 15)))) CONV_FUNC_GROUP(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_DBL, double, av_clipl_int32(llrint(*(constdouble *) pi *(1U<< 31))))#defineSET_CONV_FUNC_GROUP(ofmt, ifmt) staticvoidset_generic_function(AudioConvert *ac){}voidff_audio_convert_free(AudioConvert **ac){if(!*ac) return;ff_dither_free(&(*ac) ->dc);av_freep(ac);}AudioConvert *ff_audio_convert_alloc(AVAudioResampleContext *avr, enumAVSampleFormatout_fmt, enumAVSampleFormatin_fmt, intchannels, intsample_rate, 
intapply_map){AudioConvert *ac;intin_planar, out_planar;ac=av_mallocz(sizeof(*ac));if(!ac) returnNULL;ac->avr=avr;ac->out_fmt=out_fmt;ac->in_fmt=in_fmt;ac->channels=channels;ac->apply_map=apply_map;if(avr->dither_method!=AV_RESAMPLE_DITHER_NONE &&av_get_packed_sample_fmt(out_fmt)==AV_SAMPLE_FMT_S16 &&av_get_bytes_per_sample(in_fmt)>2){ac->dc=ff_dither_alloc(avr, out_fmt, in_fmt, channels, sample_rate, apply_map);if(!ac->dc){av_free(ac);returnNULL;}returnac;}in_planar=ff_sample_fmt_is_planar(in_fmt, channels);out_planar=ff_sample_fmt_is_planar(out_fmt, channels);if(in_planar==out_planar){ac->func_type=CONV_FUNC_TYPE_FLAT;ac->planes=in_planar?ac->channels:1;}elseif(in_planar) ac->func_type=CONV_FUNC_TYPE_INTERLEAVE;elseac->func_type=CONV_FUNC_TYPE_DEINTERLEAVE;set_generic_function(ac);if(ARCH_AARCH64) ff_audio_convert_init_aarch64(ac);if(ARCH_ARM) ff_audio_convert_init_arm(ac);if(ARCH_X86) ff_audio_convert_init_x86(ac);returnac;}intff_audio_convert(AudioConvert *ac, AudioData *out, AudioData *in){intuse_generic=1;intlen=in->nb_samples;intp;if(ac->dc){av_log(ac->avr, AV_LOG_TRACE,"%dsamples-audio_convert:%sto%s(dithered)\n", len, av_get_sample_fmt_name(ac->in_fmt), av_get_sample_fmt_name(ac->out_fmt));returnff_convert_dither(ac-> in
int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out)
Extracts the data from an AVPacket to a V4L2Buffer.
Definition: v4l2_buffers.c:365
void * buf
Definition: avisynth_c.h:690
int index
Definition: gxfenc.c:89
#define container_of(ptr, type, member)
Definition: v4l2_m2m.h:35
Rational number (pair of numerator and denominator).
Definition: rational.h:58
#define FF_DECODE_ERROR_INVALID_BITSTREAM
Definition: frame.h:498
int decode_error_flags
decode error flags of the frame, set to a combination of FF_DECODE_ERROR_xxx flags if the decoder pro...
Definition: frame.h:497
int size
Size of data in bytes.
Definition: buffer.h:93
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:505
static int64_t pts
Global timestamp for the audio frames.
V4L2Context output
Definition: v4l2_m2m.h:51
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:215
the normal 219*2^(n-8) "MPEG" YUV ranges
Definition: pixfmt.h:509
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:496
A reference to a data buffer.
Definition: buffer.h:81
IEC 61966-2-1 (sRGB or sYCC)
Definition: pixfmt.h:469
common internal api header.
also ITU-R BT470BG
Definition: pixfmt.h:461
#define AV_PKT_FLAG_CORRUPT
The packet content is corrupted.
Definition: avcodec.h:1712
int num_planes
Definition: v4l2_buffers.h:51
int key_frame
1 -> keyframe, 0-> not
Definition: frame.h:279
enum AVColorPrimaries color_primaries
Definition: frame.h:448
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed...
Definition: avcodec.h:1678
static V4L2m2mContext * buf_to_m2mctx(V4L2Buffer *buf)
Definition: v4l2_buffers.c:38
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:439
ITU-R BT2020.
Definition: pixfmt.h:442
int height
Definition: frame.h:259
FILE * out
Definition: movenc.c:54
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:450
sem_t refsync
Definition: v4l2_m2m.h:58
This structure stores compressed data.
Definition: avcodec.h:1656
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: avcodec.h:1672
static void v4l2_free_buffer(void *opaque, uint8_t *unused)
Definition: v4l2_buffers.c:205
static void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
Definition: v4l2_buffers.c:50
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
enum v4l2_buf_type type
Type of this buffer context.
Definition: v4l2_context.h:47