Go to the documentation of this file.
#define Y4M_LINE_MAX 256

/* yuv4_generate_header() */
    int raten, rated, aspectn, aspectd, n;
    const char *colorspace = "";
    const char *colorrange = "";
    /* ... */
    if (aspectn == 0 && aspectd == 1)
        /* ... */
    if (field_order != st->codec->field_order && st->codec->field_order != AV_FIELD_UNKNOWN)
        field_order = st->codec->field_order;
    /* ... */
    /* color range tag, from the stream's color_range: */
        colorrange = " XCOLORRANGE=LIMITED";
    /* ... */
        colorrange = " XCOLORRANGE=FULL";
    /* ... */
    switch (field_order) {
    /* ... */
    default:
        inter = 'p';
        break;
    }
    /* Y4M colorspace/XYSCSS tag, chosen per input pixel format; the
     * JPEG-range (yuvj*) formats also force XCOLORRANGE=FULL: */
        colorspace = " Cmono";
        colorspace = " Cmono9";
        colorspace = " Cmono10";
        colorspace = " Cmono12";
        colorspace = " Cmono16";
        colorspace = " C411 XYSCSS=411";
        colorspace = " C420jpeg XYSCSS=420JPEG";
        colorrange = " XCOLORRANGE=FULL";
        colorspace = " C422 XYSCSS=422";
        colorrange = " XCOLORRANGE=FULL";
        colorspace = " C444 XYSCSS=444";
        colorrange = " XCOLORRANGE=FULL";
    default:
        colorspace = " C420jpeg XYSCSS=420JPEG";
        break;
        colorspace = " C422 XYSCSS=422";
        colorspace = " C444 XYSCSS=444";
        colorspace = " C420p9 XYSCSS=420P9";
        colorspace = " C422p9 XYSCSS=422P9";
        colorspace = " C444p9 XYSCSS=444P9";
        colorspace = " C420p10 XYSCSS=420P10";
        colorspace = " C422p10 XYSCSS=422P10";
        colorspace = " C444p10 XYSCSS=444P10";
        colorspace = " C420p12 XYSCSS=420P12";
        colorspace = " C422p12 XYSCSS=422P12";
        colorspace = " C444p12 XYSCSS=444P12";
        colorspace = " C420p14 XYSCSS=420P14";
        colorspace = " C422p14 XYSCSS=422P14";
        colorspace = " C444p14 XYSCSS=444P14";
        colorspace = " C420p16 XYSCSS=420P16";
        colorspace = " C422p16 XYSCSS=422P16";
        colorspace = " C444p16 XYSCSS=444P16";
    /* ... */
    /* trailing arguments of the call that formats the stream header: */
             aspectn, aspectd, colorspace, colorrange);
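The dangling argument list above is the tail of the call that prints the stream header. As a rough sketch (not the file's literal code), assuming the standard YUV4MPEG2 header syntax and a hypothetical helper name, the line could be assembled like this:

#include <stdio.h>

/* Hypothetical helper, for illustration only: build a YUV4MPEG2 stream
 * header. colorspace/colorrange are the " C..."/" XCOLORRANGE=..." tags
 * selected above; inter is the interlacing flag, e.g. 'p' for progressive. */
static int format_y4m_header(char *buf, size_t size,
                             int width, int height,
                             int raten, int rated, char inter,
                             int aspectn, int aspectd,
                             const char *colorspace, const char *colorrange)
{
    return snprintf(buf, size, "YUV4MPEG2 W%d H%d F%d:%d I%c A%d:%d%s%s\n",
                    width, height, raten, rated, inter,
                    aspectn, aspectd, colorspace, colorrange);
}

With those tags, a 1920x1080, 25 fps progressive 4:2:0 stream would come out as something like "YUV4MPEG2 W1920 H1080 F25:1 Ip A1:1 C420jpeg XYSCSS=420JPEG".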
/* yuv4_write_packet() */
    int *first_pkt = s->priv_data;
    /* ... */
           "Error. YUV4MPEG stream header write failed.\n");
    /* ... */
    ptr = frame->data[0];
    /* ... */
        ptr += frame->linesize[0];
    /* ... */
    ptr1 = frame->data[1];
    ptr2 = frame->data[2];
    /* ... */
        ptr1 += frame->linesize[1];
    /* ... */
        ptr2 += frame->linesize[2];
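The ptr/linesize fragments above are the row-by-row plane writes: the packet carries a wrapped AVFrame (see AV_CODEC_ID_WRAPPED_AVFRAME below), and each plane is emitted one visible row at a time because linesize may include padding. A minimal sketch of that pattern for the luma plane, assuming an 8-bit format and that pb, width and height are available as in the muxer (this is not the file's literal loop):

#include <libavformat/avio.h>
#include <libavutil/frame.h>

static void write_luma_plane(AVIOContext *pb, const AVFrame *frame,
                             int width, int height)
{
    const uint8_t *ptr = frame->data[0];
    for (int i = 0; i < height; i++) {
        avio_write(pb, ptr, width);   /* only the visible bytes of the row */
        ptr += frame->linesize[0];    /* linesize may be wider than width */
    }
}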
/* yuv4_write_header() */
    int *first_pkt = s->priv_data;

    if (s->nb_streams != 1)
    /* ... */
    switch (s->streams[0]->codecpar->format) {
    /* ... */
               "stream, some mjpegtools might not work.\n");
    /* ... */
               "Use '-strict -1' to encode to this pixel format.\n",
    /* ... */
               "Mjpegtools will not work.\n");
    /* ... */
               "yuv444p, yuv422p, yuv420p, yuv411p and gray8 pixel formats. "
               "And using 'strict -1' also yuv444p9, yuv422p9, yuv420p9, "
               "yuv444p10, yuv422p10, yuv420p10, "
               "yuv444p12, yuv422p12, yuv420p12, "
               "yuv444p14, yuv422p14, yuv420p14, "
               "yuv444p16, yuv422p16, yuv420p16, "
               "gray9, gray10, gray12 "
               "and gray16 pixel formats. "
               "Use -pix_fmt to select one.\n");
355 .
name =
"yuv4mpegpipe",
358 .priv_data_size =
sizeof(
int),
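For context, an application selects this muxer through the .name shown above. A minimal, illustrative sketch (error handling trimmed; "out.y4m" is a made-up path, not something from this file):

#include <libavformat/avformat.h>

static AVFormatContext *open_y4m_output(void)
{
    AVFormatContext *oc = NULL;
    if (avformat_alloc_output_context2(&oc, NULL, "yuv4mpegpipe", "out.y4m") < 0)
        return NULL;                      /* muxer lookup failed */
    if (avio_open(&oc->pb, "out.y4m", AVIO_FLAG_WRITE) < 0) {
        avformat_free_context(oc);
        return NULL;
    }
    return oc;                            /* ready for avformat_write_header() */
}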
#define FF_ENABLE_DEPRECATION_WARNINGS
#define AV_LOG_WARNING
Something somehow does not look correct.
AVFrame
This structure describes decoded (raw) audio or video data.
@ AVCOL_RANGE_JPEG
the normal 2^n-1 "JPEG" YUV ranges
#define AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV422P9
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
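Taken together with AV_CEIL_RSHIFT below, this is how a Y4M writer can size its chroma planes. A small illustrative example (the values are examples, not taken from this file):

#include <libavutil/pixdesc.h>
#include <libavutil/common.h>   /* AV_CEIL_RSHIFT */

static void chroma_plane_dims(int width, int height, int *cw, int *ch)
{
    int h_shift, v_shift;
    av_pix_fmt_get_chroma_sub_sample(AV_PIX_FMT_YUV420P, &h_shift, &v_shift);
    *cw = AV_CEIL_RSHIFT(width,  h_shift);   /* 1920 -> 960 for 4:2:0 */
    *ch = AV_CEIL_RSHIFT(height, v_shift);   /* 1080 -> 540 for 4:2:0 */
}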
#define AV_PIX_FMT_GRAY16
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
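For instance, the frame rate that ends up in the F<num>:<den> header field can be brought to lowest terms with it (illustrative values, not the muxer's actual call):

#include <libavutil/rational.h>

static void reduce_frame_rate(void)
{
    int raten, rated;
    av_reduce(&raten, &rated, 24000, 1000, 65535);   /* -> raten = 24, rated = 1 */
}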
static int yuv4_generate_header(AVFormatContext *s, char *buf)
#define AV_PIX_FMT_YUV444P10
static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define AV_PIX_FMT_YUV422P16
@ AV_PIX_FMT_YUVJ422P
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV422P and setting color_range.
#define AV_PIX_FMT_YUV444P16
#define AV_CEIL_RSHIFT(a, b)
#define AV_PIX_FMT_YUV420P9
#define AV_PIX_FMT_YUV420P16
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
@ AV_CODEC_ID_WRAPPED_AVFRAME
Passthrough codec, AVFrames wrapped in AVPacket.
@ AV_PIX_FMT_YUVJ444P
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV444P and setting color_range.
#define AV_PIX_FMT_GRAY10
AVCodecParameters * codecpar
Codec parameters associated with this stream.
AVRational time_base
This is the fundamental unit of time (in seconds) in terms of which frame timestamps are represented.
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
@ AV_PIX_FMT_YUVJ420P
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of AV_PIX_FMT_YUV420P and setting color_range.
#define AV_PIX_FMT_YUV422P10
@ AV_PIX_FMT_GRAY8
Y, 8bpp.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
#define AV_PIX_FMT_YUV422P12
#define AV_PIX_FMT_YUV444P12
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown)
void avio_write(AVIOContext *s, const unsigned char *buf, int size)
#define FF_COMPLIANCE_NORMAL
static int yuv4_write_header(AVFormatContext *s)
static void write_packet(OutputFile *of, AVPacket *pkt, OutputStream *ost, int unqueue)
#define i(width, name, range_min, range_max)
AVOutputFormat ff_yuv4mpegpipe_muxer
enum AVColorRange color_range
Video only.
@ AVCOL_RANGE_MPEG
the normal 219*2^(n-8) "MPEG" YUV ranges
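For 8-bit video this limited range is Y 16-235 and chroma 16-240, versus 0-255 in the full-range (JPEG) case above; the muxer advertises the difference with the XCOLORRANGE=LIMITED and XCOLORRANGE=FULL header tags generated earlier.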
#define AV_PIX_FMT_YUV444P9
enum AVFieldOrder field_order
Video only.
#define AV_PIX_FMT_YUV420P12
enum AVChromaLocation chroma_location
#define AV_PIX_FMT_YUV422P14
int avio_printf(AVIOContext *s, const char *fmt, ...) av_printf_format(2, 3)
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
#define FF_DISABLE_DEPRECATION_WARNINGS
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
AVPacket
This structure stores compressed data.
@ AV_PIX_FMT_YUV411P
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
#define AV_PIX_FMT_YUV444P14
static void write_header(FFV1Context *f)
#define AV_PIX_FMT_GRAY12
#define AV_PIX_FMT_YUV420P14
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
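A one-line illustrative use, handy for diagnostics like the pixel format messages above:

#include <libavutil/pixdesc.h>

static const char *pix_name_example(void)
{
    return av_get_pix_fmt_name(AV_PIX_FMT_YUV420P);   /* "yuv420p" */
}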