#include <stdatomic.h>

#define V4L_ALLFORMATS  3
#define V4L_RAWFORMATS  1
#define V4L_COMPFORMATS 2

#define V4L_TS_DEFAULT  0
#define V4L_TS_MONO2ABS 2

#define V4L_TS_CONVERT_READY V4L_TS_DEFAULT
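/*
 * The V4L_TS_* values select how kernel buffer timestamps are handled:
 * V4L_TS_DEFAULT returns them as-is, V4L_TS_MONO2ABS assumes they come from
 * the monotonic clock and converts them to absolute (wall-clock) time, and
 * V4L_TS_CONVERT_READY marks the detection as finished. A minimal sketch of
 * the monotonic-to-absolute idea, outside FFmpeg (mono_to_abs_us is a made-up
 * name; the demuxer itself smooths the clock offset with a DLL time filter
 * instead of re-sampling it for every frame):
 */
#include <stdint.h>
#include <time.h>
#include <sys/time.h>

static int64_t mono_to_abs_us(int64_t mono_us)
{
    struct timespec ts;
    struct timeval  tv;

    clock_gettime(CLOCK_MONOTONIC, &ts);    /* monotonic "now" */
    gettimeofday(&tv, NULL);                /* wall-clock "now" */

    int64_t now_mono = ts.tv_sec * INT64_C(1000000) + ts.tv_nsec / 1000;
    int64_t now_abs  = tv.tv_sec * INT64_C(1000000) + tv.tv_usec;

    /* shift the monotonic timestamp by the current clock offset */
    return mono_us + (now_abs - now_mono);
}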
    int (*open_f)(const char *file, int oflag, ...);
    /* ... */
    int (*ioctl_f)(int fd, int request, ...);
 
    struct v4l2_capability cap;
 
#define SET_WRAPPERS(prefix) do {       \
    s->open_f   = prefix ## open;       \
    s->close_f  = prefix ## close;      \
    s->dup_f    = prefix ## dup;        \
    s->ioctl_f  = prefix ## ioctl;      \
    s->read_f   = prefix ## read;       \
    s->mmap_f   = prefix ## mmap;       \
    s->munmap_f = prefix ## munmap;     \
} while (0)

    if (s->use_libv4l2) {
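/*
 * SET_WRAPPERS pastes the given prefix onto each I/O function name, so the
 * rest of the file can call through the s->*_f pointers without caring which
 * backend is in use. A sketch of what the selection presumably looks like at
 * this point (CONFIG_LIBV4L2 is FFmpeg's build-time switch; the error path
 * for a build without libv4l2 support is omitted here):
 */
    if (s->use_libv4l2) {
#if CONFIG_LIBV4L2
        SET_WRAPPERS(v4l2_);   /* libv4l2's v4l2_open, v4l2_ioctl, v4l2_mmap, ... */
#endif
    } else {
        SET_WRAPPERS();        /* plain libc open, ioctl, mmap, ... */
    }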
 
#define v4l2_open   s->open_f
#define v4l2_close  s->close_f
#define v4l2_dup    s->dup_f
#define v4l2_ioctl  s->ioctl_f
#define v4l2_read   s->read_f
#define v4l2_mmap   s->mmap_f
#define v4l2_munmap s->munmap_f
    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        /* ... */
    }

    av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
           fd, cap.capabilities);
 
    if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
        s->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    } else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
        s->buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        av_log(ctx, AV_LOG_ERROR,
               "The device does not support the streaming I/O method.\n");
        /* ... */
    }
 
static int device_init(AVFormatContext *ctx, int *width, int *height,
                       uint32_t pixelformat)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_format fmt = { .type = s->buf_type };

    fmt.fmt.pix.width = *width;
    fmt.fmt.pix.height = *height;
    fmt.fmt.pix.pixelformat = pixelformat;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;

    /* ... VIDIOC_S_FMT is issued here; the driver may adjust the request ... */

    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    if (pixelformat != fmt.fmt.pix.pixelformat) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver changed the pixel format "
               "from 0x%08X to 0x%08X\n",
               pixelformat, fmt.fmt.pix.pixelformat);
        /* ... */
    }

    if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver is using the interlaced mode\n");
        /* ... */
    }
 
    if (std & V4L2_STD_NTSC)
 
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while (!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            av_log(ctx, AV_LOG_INFO, " %ux%u",
                   vfse.discrete.width, vfse.discrete.height);
            break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
                   vfse.stepwise.min_width,
                   vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height,
                   vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
        }
        vfse.index++;
    }
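/*
 * The loop above walks VIDIOC_ENUM_FRAMESIZES for one pixel format; drivers
 * report either a list of discrete sizes or a (possibly stepped) range. A
 * standalone sketch of the same enumeration, assuming an open capture
 * device fd:
 */
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static void enum_framesizes(int fd, unsigned int pixelformat)
{
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while (!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            printf("%ux%u\n", vfse.discrete.width, vfse.discrete.height);
            break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            printf("{%u-%u,+%u}x{%u-%u,+%u}\n",
                   vfse.stepwise.min_width,  vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height, vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
            break;
        }
        vfse.index++;
    }
}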
 
    struct v4l2_fmtdesc vfd = { .type = s->buf_type };

    /* ... enumerate formats with VIDIOC_ENUM_FMT ... */
        if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
            type & V4L_RAWFORMATS) {
            /* ... */
                   fmt_name ? fmt_name : "Unsupported",
            /* ... */
        } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
                   type & V4L_COMPFORMATS) {
            /* ... */
        }

#ifdef V4L2_FMT_FLAG_EMULATED
        if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
            /* ... */
#endif

#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
        list_framesizes(ctx, vfd.pixelformat);
#endif
 
    struct v4l2_standard standard;

    for (standard.index = 0; ; standard.index++) {
        if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
            /* ... */
        }
        av_log(ctx, AV_LOG_INFO, /* ... */
               standard.index, (uint64_t)standard.id, standard.name);
    }
 
    struct v4l2_requestbuffers req = {
        /* ... */
        .memory = V4L2_MEMORY_MMAP
    };

    /* ... */
    s->buffers = req.count;

    for (i = 0; i < req.count; i++) {
        unsigned int buf_length, buf_offset;
        struct v4l2_plane planes[VIDEO_MAX_PLANES];
        struct v4l2_buffer buf = {
            /* ... */
            .memory   = V4L2_MEMORY_MMAP,
            /* ... */
            .length   = s->multiplanar ? VIDEO_MAX_PLANES : 0,
        };

        if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
            /* ... */
        }

        if (s->multiplanar) {
            if (buf.length != 1) {
                /* ... */
            }
            buf_length = buf.m.planes[0].length;
            buf_offset = buf.m.planes[0].m.mem_offset;
        } else {
            buf_length = buf.length;
            buf_offset = buf.m.offset;
        }

        s->buf_len[i] = buf_length;
        if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "buf_len[%d] = %d < expected frame size %d\n",
                   i, s->buf_len[i], s->frame_size);
            /* ... */
        }
        s->buf_start[i] = v4l2_mmap(NULL, buf_length,
                                    PROT_READ | PROT_WRITE, MAP_SHARED,
                                    s->fd, buf_offset);

        if (s->buf_start[i] == MAP_FAILED) {
            /* ... */
        }
    }
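/*
 * mmap_init() requests a set of kernel buffers and maps each one into the
 * process: VIDIOC_REQBUFS, then VIDIOC_QUERYBUF and mmap() per buffer. A
 * standalone single-planar sketch of that sequence (NBUF is an illustrative
 * count; the demuxer derives the real count from desired_video_buffers and
 * from what the driver grants):
 */
#include <stddef.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

#define NBUF 4   /* illustrative buffer count */

static int map_buffers(int fd, void *start[NBUF], size_t len[NBUF])
{
    struct v4l2_requestbuffers req = {
        .count  = NBUF,
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP,
    };

    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0 || req.count == 0)
        return -1;                      /* driver refused mmap streaming */

    for (unsigned int i = 0; i < req.count && i < NBUF; i++) {
        struct v4l2_buffer buf = {
            .index  = i,
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .memory = V4L2_MEMORY_MMAP,
        };

        if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
            return -1;

        len[i]   = buf.length;
        start[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
                        MAP_SHARED, fd, buf.m.offset);
        if (start[i] == MAP_FAILED)
            return -1;
    }
    return (int)(req.count < NBUF ? req.count : NBUF);
}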
 
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = { 0 };
    struct buff_data *buf_descriptor = opaque;

    buf.type   = s->buf_type;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index  = buf_descriptor->index;
    /* ... */
    buf.length = s->multiplanar ? VIDEO_MAX_PLANES : 0;
 
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
static int64_t av_gettime_monotonic(void)
{
    return av_gettime_relative();
}
#endif

    /* init_convert_timestamp(): detect whether the kernel returned absolute
     * or monotonic timestamps */
    if (/* ... */
        ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
        /* ... */
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
        now = av_gettime_monotonic();
        if (/* ... */
            (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
            /* ... */
        }
#endif

    /* convert_timestamp(): track the offset between the monotonic and the
     * wall clock */
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
        int64_t nowm = av_gettime_monotonic();
        /* ... */
        s->last_time_m = nowm;
#endif
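/*
 * The +1 s / -10 s window above is how the demuxer guesses which clock
 * produced a kernel timestamp: if it lands close to a reference clock's
 * "now", it is assumed to belong to that clock. A tiny sketch of the check
 * (looks_like_clock is a made-up name; all values are in microseconds):
 */
#include <stdint.h>

static int looks_like_clock(int64_t ts, int64_t now)
{
    const int64_t second = INT64_C(1000000);   /* AV_TIME_BASE is 1e6 us */

    /* accept ts if it is at most 1 s in the future and 10 s in the past
     * relative to the reference clock */
    return ts <= now + 1 * second && ts >= now - 10 * second;
}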
 
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    struct v4l2_buffer buf = {
        /* ... */
        .memory   = V4L2_MEMORY_MMAP,
        /* ... */
        .length   = s->multiplanar ? VIDEO_MAX_PLANES : 0,
    };
    struct timeval buf_ts;
    unsigned int bytesused;

    while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));

    buf_ts = buf.timestamp;

    if (buf.index >= s->buffers) {
        /* ... */
    }

    bytesused = s->multiplanar ? buf.m.planes[0].bytesused : buf.bytesused;

#ifdef V4L2_BUF_FLAG_ERROR
    if (buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(ctx, AV_LOG_WARNING,
               "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
               bytesused);
        /* ... */
    }
#endif
        /* ... */
            s->frame_size = bytesused;

        if (s->frame_size > 0 && bytesused != s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
                   bytesused, s->frame_size, buf.flags);
            /* ... */
        }

    /* copy path: allocate a new packet and copy the mapped data */
        /* ... */
        memcpy(pkt->data, s->buf_start[buf.index], bytesused);

    /* zero-copy path: hand out the mapped buffer and re-queue it when the
     * packet is released (see mmap_release_buffer) */
        pkt->data     = s->buf_start[buf.index];
        /* ... */
        if (!buf_descriptor) {
            /* ... */
        }
        buf_descriptor->index = buf.index;
        buf_descriptor->s     = s;

    pkt->pts = buf_ts.tv_sec * INT64_C(1000000) + buf_ts.tv_usec;
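/*
 * Reading a frame boils down to VIDIOC_DQBUF: block until the driver hands
 * back a filled buffer, note its index, payload size and timestamp, and
 * return the buffer with VIDIOC_QBUF once the data has been consumed (in the
 * demuxer that re-queueing is what mmap_release_buffer()/enqueue_buffer() do
 * when the packet is released). A single-planar standalone sketch:
 */
#include <errno.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* On success *index, *bytes and *ts_us describe the captured frame; the
 * caller re-queues the buffer with VIDIOC_QBUF when done with the data. */
static int dequeue_frame(int fd, unsigned int *index, unsigned int *bytes,
                         long long *ts_us)
{
    struct v4l2_buffer buf;
    int res;

    memset(&buf, 0, sizeof(buf));
    buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    while ((res = ioctl(fd, VIDIOC_DQBUF, &buf)) < 0 && errno == EINTR)
        ;                               /* retry if interrupted by a signal */
    if (res < 0)
        return -errno;

    *index = buf.index;
    *bytes = buf.bytesused;
    *ts_us = buf.timestamp.tv_sec * 1000000LL + buf.timestamp.tv_usec;
    return 0;
}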
 
    enum v4l2_buf_type type;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_plane planes[VIDEO_MAX_PLANES];
        struct v4l2_buffer buf = {
            /* ... */
            .memory   = V4L2_MEMORY_MMAP,
            /* ... */
            .length   = s->multiplanar ? VIDEO_MAX_PLANES : 0,
        };
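/*
 * mmap_start() primes capture by queuing every mapped buffer and then issuing
 * VIDIOC_STREAMON; mmap_close() undoes it with VIDIOC_STREAMOFF and munmap()
 * on each buffer. A single-planar sketch of the start sequence (nbuf is the
 * number of buffers mapped earlier):
 */
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int start_streaming(int fd, unsigned int nbuf)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    for (unsigned int i = 0; i < nbuf; i++) {
        struct v4l2_buffer buf = {
            .index  = i,
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .memory = V4L2_MEMORY_MMAP,
        };
        if (ioctl(fd, VIDIOC_QBUF, &buf) < 0)
            return -1;                  /* hand every buffer to the driver */
    }
    return ioctl(fd, VIDIOC_STREAMON, &type);
}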
 
    enum v4l2_buf_type type;

    for (i = 0; i < s->buffers; i++) {
 
    struct v4l2_standard standard = { 0 };
    struct v4l2_streamparm streamparm = { 0 };
    struct v4l2_fract *tpf;

    /* ... select the requested video standard ... */
                if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                    /* ... */
                }
            /* ... */
            if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
                /* ... */
            }
        /* ... */
            av_log(ctx, AV_LOG_ERROR,
                   "This device does not support any standard\n");

    /* ... query the current standard to learn its frame period ... */
        tpf = &standard.frameperiod;
        /* ... */
            if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                /* ... */
                    tpf = &streamparm.parm.capture.timeperframe;
                /* ... */
            }
            if (standard.id == s->std_id) {
                av_log(ctx, AV_LOG_DEBUG,
                       "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
                       standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
            }
    /* ... */
        tpf = &streamparm.parm.capture.timeperframe;

    streamparm.type = s->buf_type;
    if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
        /* ... */
    } else if (framerate_q.num && framerate_q.den) {
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            tpf = &streamparm.parm.capture.timeperframe;

            av_log(ctx, AV_LOG_DEBUG, /* ... */
                   framerate_q.den, framerate_q.num);
            tpf->numerator   = framerate_q.den;
            tpf->denominator = framerate_q.num;

            if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
                /* ... */
            }

            if (framerate_q.num != tpf->denominator ||
                framerate_q.den != tpf->numerator) {
                av_log(ctx, AV_LOG_INFO,
                       "The driver changed the time per frame from "
                       "%d/%d to %d/%d\n",
                       framerate_q.den, framerate_q.num,
                       tpf->numerator, tpf->denominator);
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "The driver does not permit changing the time per frame\n");
        }
    }

    if (tpf->denominator > 0 && tpf->numerator > 0) {
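/*
 * V4L2 expresses the capture rate as time per frame, so a requested frame
 * rate of num/den becomes a timeperframe of den/num, and the driver may still
 * round the result. A standalone sketch of the VIDIOC_G_PARM / VIDIOC_S_PARM
 * exchange performed above:
 */
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Request num/den frames per second; returns the denominator of the
 * time-per-frame the driver actually chose, or -1 on failure. */
static int set_framerate(int fd, unsigned int num, unsigned int den)
{
    struct v4l2_streamparm parm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

    if (ioctl(fd, VIDIOC_G_PARM, &parm) < 0)
        return -1;
    if (!(parm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME))
        return -1;                      /* driver cannot change the rate */

    parm.parm.capture.timeperframe.numerator   = den;  /* time per frame */
    parm.parm.capture.timeperframe.denominator = num;  /* is the inverse */

    if (ioctl(fd, VIDIOC_S_PARM, &parm) < 0)
        return -1;

    /* the effective rate is whatever the driver wrote back */
    return (int)parm.parm.capture.timeperframe.denominator;
}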
 
static int device_try_init(AVFormatContext *ctx,
                           enum AVPixelFormat pix_fmt,
                           int *width, int *height,
                           uint32_t *desired_format,
                           enum AVCodecID *codec_id)
{
    /* ... */
    if (*desired_format) {
        /* ... */
    }

    if (!*desired_format) {
        /* ... */
    }
    /* ... */
        if (*desired_format == 0) {
            av_log(ctx, AV_LOG_ERROR,
                   /* ... */ "codec '%s' (id %d), pixel format '%s' (id %d)\n",
                   /* ... */);
        }
 
    uint32_t desired_format;
    struct v4l2_input input = { 0 };

    /* ... */
        v4l2_log_file = fopen("/dev/null", "w");
 
    if (s->channel != -1) {
        /* ... */
        if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
            /* ... */
        }
    } else {
        /* ... */
        if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
            /* ... */
        }
    }
 
    if (s->list_format) {
        /* ... */
    }

    if (s->list_standard) {
        /* ... */
    }

    if (s->pixel_format) {
        /* ... */
    }
 
    if (!s->width && !s->height) {
        struct v4l2_format fmt = { .type = s->buf_type };

        av_log(ctx, AV_LOG_VERBOSE,
               "Querying the device for the current frame size\n");
        /* ... */
        s->width  = fmt.fmt.pix.width;
        s->height = fmt.fmt.pix.height;
        av_log(ctx, AV_LOG_VERBOSE,
               "Setting frame size to %dx%d\n", s->width, s->height);
    }

    /* ... */
    s->pixelformat = desired_format;

    /* ... */
    s->frame_size = av_image_get_buffer_size(/* ... */
                                             s->width, s->height, 1);
 
    if (desired_format == V4L2_PIX_FMT_YVU420)
        /* ... */
    else if (desired_format == V4L2_PIX_FMT_YVU410)
        /* ... */
 
    return !strncmp(name, "video", 5) ||
           !strncmp(name, "radio", 5) ||
           !strncmp(name, "vbi", 3) ||
           !strncmp(name, "v4l-subdev", 10);
 
    struct dirent *entry;

    dir = opendir("/dev");
    /* ... */
    while ((entry = readdir(dir))) {
        struct v4l2_capability cap;
        char device_name[256];
        /* ... */
        if (size >= sizeof(device_name)) {
            /* ... */
        }
        /* ... */
        if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
            /* ... */
        }
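/*
 * Device discovery is a scan of /dev: entries whose names look like
 * video4linux nodes (video*, radio*, vbi*, v4l-subdev*) are opened and asked
 * VIDIOC_QUERYCAP; the card string is used as the human-readable description.
 * A standalone sketch limited to the videoN nodes:
 */
#include <dirent.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

static void scan_v4l_devices(void)
{
    DIR *dir = opendir("/dev");
    struct dirent *entry;

    if (!dir)
        return;
    while ((entry = readdir(dir))) {
        char path[300];
        struct v4l2_capability cap;
        int fd;

        if (strncmp(entry->d_name, "video", 5))   /* videoN nodes only */
            continue;
        snprintf(path, sizeof(path), "/dev/%s", entry->d_name);
        fd = open(path, O_RDONLY | O_NONBLOCK);
        if (fd < 0)
            continue;
        if (!ioctl(fd, VIDIOC_QUERYCAP, &cap))
            printf("%s: %s\n", path, (const char *)cap.card);
        close(fd);
    }
    closedir(dir);
}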
 
#define OFFSET(x) offsetof(struct video_data, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
    { "list_formats", "list available formats and exit",                          OFFSET(list_format),  AV_OPT_TYPE_INT,    {.i64 = 0 },  0, INT_MAX, DEC, .unit = "list_formats" },

    { "list_standards", "list supported standards and exit",                      OFFSET(list_standard), AV_OPT_TYPE_INT,   {.i64 = 0 },  0, 1, DEC, .unit = "list_standards" },
    { "all",            "show all supported standards",                           OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 },  0, 0, DEC, .unit = "list_standards" },

    { "timestamps",   "set type of timestamps for grabbed frames",                OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 }, 0, 2, DEC, .unit = "timestamps" },
    { "ts",           "set type of timestamps for grabbed frames",                OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 }, 0, 2, DEC, .unit = "timestamps" },

    { "use_libv4l2",  "use libv4l2 (v4l-utils) conversion functions",             OFFSET(use_libv4l2),  AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, DEC },
 
const FFInputFormat ff_v4l2_demuxer = {
    .p.name          = "video4linux2,v4l2",
 
  