pngenc.c (FFmpeg libavcodec): PNG and APNG encoder. Excerpted source listing, followed by the generated symbol cross-reference index.
#define IOBUF_SIZE 4096
/* png_get_interlaced_row(): copy the pixels of one interlace pass out of a
 * full source row (excerpt; the complete signature appears in the symbol
 * index below). */
static void png_get_interlaced_row(uint8_t *dst, int row_size,
                                   int bits_per_pixel, int pass,
                                   const uint8_t *src, int width)
{
    int x, mask, dst_x, j, b, bpp;
    static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
    /* ... */
    switch (bits_per_pixel) {
    /* ... 1 bit per pixel: repack the selected bits ... */
        memset(dst, 0, row_size);
        for (x = 0; x < width; x++) {
            if ((mask << j) & 0x80) {
                b = (src[x >> 3] >> (7 - j)) & 1;
                dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
            /* ... */
    /* ... whole-byte pixels ... */
        bpp = bits_per_pixel >> 3;
        for (x = 0; x < width; x++) {
            if ((mask << j) & 0x80) {
                /* ... */
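Each masks[] entry is the column-selection byte for one interlace pass: with j the position of column x within its group of eight (j = x & 7, as the 1-bit branch's src[x >> 3] >> (7 - j) shows), the test ((mask << j) & 0x80) reads bit 7 - j of the mask. A tiny self-contained restatement of that test:

/* Sketch: does column x belong to interlace pass `pass`, following the
 * masks[] table above? */
static int column_in_pass(int x, int pass)
{
    static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};
    int j = x & 7;                       /* position within the 8-column group */
    return (masks[pass] << j) & 0x80;    /* bit (7 - j) of the pass mask */
}

For example, pass 4 uses 0xaa (binary 10101010), so every even column is selected, while the final pass uses 0xff and keeps every column.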
/* sub_png_paeth_prediction(): apply the Paeth predictor (PNG filter type 4)
 * to one row (excerpt). */
static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src,
                                     const uint8_t *top, int w, int bpp)
{
    /* ... */
    for (i = 0; i < w; i++) {
        int a, b, c, p, pa, pb, pc;
        /* ... */
        if (pa <= pb && pa <= pc)
            /* ... */
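The comparison above is the selection step of the Paeth predictor defined by the PNG specification: each byte is predicted from its left neighbour a, the byte above b, and the upper-left byte c, and whichever of the three is closest to the initial estimate a + b - c wins (ties prefer a, then b). A standalone sketch of that rule for a single byte, separate from the row-oriented FFmpeg routine above:

#include <stdlib.h>

/* Sketch: the PNG Paeth predictor for one byte position. */
static int paeth_predict(int a, int b, int c)
{
    int p  = a + b - c;          /* initial estimate */
    int pa = abs(p - a);
    int pb = abs(p - b);
    int pc = abs(p - c);
    if (pa <= pb && pa <= pc)
        return a;                /* left neighbour is closest */
    if (pb <= pc)
        return b;                /* above neighbour is closest */
    return c;                    /* upper-left neighbour is closest */
}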
/* sub_left_prediction(): the "sub" filter, subtracting the pixel bpp bytes
 * to the left (excerpt). */
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst,
                                const uint8_t *src, int bpp, int size)
{
    const uint8_t *src1 = src + bpp;
    /* ... */
    for (x = 0; x < unaligned_w; x++)
        /* ... */
/* png_filter_row(): apply one concrete row filter (none, sub, up, average or
 * Paeth) before the row is deflated (excerpt). */
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
                           const uint8_t *src, const uint8_t *top,
                           int size, int bpp)
{
    switch (filter_type) {
    /* ... */
        for (i = 0; i < bpp; i++)
            /* ... */
    /* ... */
        for (i = 0; i < bpp; i++)
            /* ... */
    /* ... */
        av_unreachable("PNG_FILTER_VALUE_MIXED can't happen here and all others are covered");
    }
}
/* png_choose_filter(): return the filtered row to be written, using the
 * fixed filter from s->filter_type or, for "mixed", whichever per-row filter
 * scores lowest (excerpt). */
static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
                                  const uint8_t *src, const uint8_t *top,
                                  int size, int bpp)
{
    int pred = s->filter_type;
    /* ... */
    int cost, bcost = INT_MAX;
    /* ... */
            cost += abs((int8_t) buf1[i]);
        /* ... */
            FFSWAP(uint8_t *, buf1, buf2);
    /* ... */
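The cost accumulation above is the usual PNG heuristic for mixed filtering: each candidate filter is applied to the row, the filtered bytes are summed as absolute signed values, and the candidate with the smallest sum is kept on the assumption that it will deflate best. A minimal standalone sketch of that scoring step (the helper and its name are illustrative, not from pngenc.c):

#include <stdint.h>
#include <stdlib.h>

/* Sketch: score one filtered row; a lower score is expected to compress
 * better with zlib. */
static unsigned long filtered_row_cost(const uint8_t *filtered, int size)
{
    unsigned long cost = 0;
    for (int i = 0; i < size; i++)
        cost += abs((int8_t) filtered[i]);   /* treat bytes as signed deltas */
    return cost;
}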
/* png_write_chunk(): emit one chunk into the output pointer: 32-bit length,
 * 32-bit tag, payload, and a CRC computed over the tag and payload
 * (excerpt). */
static void png_write_chunk(uint8_t **f, uint32_t tag,
                            const uint8_t *buf, int length)
{
    /* ... */
    bytestream_put_be32(f, length);
    /* ... */
    crc = av_crc(crc_table, crc, tagbuf, 4);
    /* ... */
        crc = av_crc(crc_table, crc, buf, length);
        memcpy(*f, buf, length);
        /* ... */
    bytestream_put_be32(f, ~crc);
}
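On disk a PNG chunk is a 4-byte big-endian payload length, a 4-byte tag, the payload, and a 4-byte CRC-32 that covers the tag and payload but not the length field. A standalone sketch of the same layout that uses zlib's crc32() instead of libavutil's av_crc() (an assumption made purely to keep the example self-contained):

#include <stdint.h>
#include <string.h>
#include <zlib.h>

/* Sketch: serialize one PNG chunk at *p and advance the write pointer. */
static void write_chunk(uint8_t **p, const char tag[4],
                        const uint8_t *payload, uint32_t len)
{
    uint8_t *d = *p;
    uLong crc;

    d[0] = len >> 24; d[1] = len >> 16; d[2] = len >> 8; d[3] = len;
    memcpy(d + 4, tag, 4);
    memcpy(d + 8, payload, len);
    crc = crc32(0, d + 4, 4 + len);            /* tag + payload only */
    d[8 + len + 0] = crc >> 24;
    d[8 + len + 1] = crc >> 16;
    d[8 + len + 2] = crc >> 8;
    d[8 + len + 3] = crc;
    *p = d + 12 + len;                         /* 12 bytes of chunk framing */
}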
/* png_write_image_data(): write a block of compressed image data, either as
 * a plain IDAT chunk or, for APNG frames after the first, as an fdAT chunk
 * that carries a sequence number (excerpt; the fdAT path is shown). */
static void png_write_image_data(AVCodecContext *avctx,
                                 const uint8_t *buf, int length)
{
    /* ... */
    bytestream_put_be32(&s->bytestream, length + 4);
    /* ... */
    bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
    bytestream_put_be32(&s->bytestream, s->sequence_number);
    crc = av_crc(crc_table, crc, s->bytestream - 8, 8);
    /* ... */
    crc = av_crc(crc_table, crc, buf, length);
    memcpy(s->bytestream, buf, length);
    s->bytestream += length;
    /* ... */
    bytestream_put_be32(&s->bytestream, ~crc);
    /* ... */
    ++s->sequence_number;
}
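An fdAT chunk is an IDAT payload prefixed with a 4-byte sequence number, which is why the length written above is length + 4 and why the CRC pass starts 8 bytes back, covering the tag and the sequence number before the payload; the final value is stored inverted (~crc). A worked example with assumed values (sequence number 3, a 2-byte compressed payload):

#include <stdint.h>

/* Hypothetical byte layout of one fdAT chunk as produced by the code above;
 * the four CRC bytes that follow the payload are omitted. */
static const uint8_t example_fdat[] = {
    0x00, 0x00, 0x00, 0x06,        /* length = payload (2) + seq. number (4) */
    'f',  'd',  'A',  'T',         /* chunk tag */
    0x00, 0x00, 0x00, 0x03,        /* sequence number */
    0xAB, 0xCD,                    /* compressed payload bytes */
};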
/* png_write_row(): feed one filtered row to zlib, flushing the IOBUF_SIZE
 * output buffer into image-data chunks whenever it fills up (excerpt). */
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
{
    z_stream *const zstream = &s->zstream.zstream;
    /* ... */
    zstream->avail_in = size;
    zstream->next_in  = data;
    while (zstream->avail_in > 0) {
        /* ... */
        if (zstream->avail_out == 0) {
            if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
                /* ... */
            zstream->next_out = s->buf;
            /* ... */
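This is the standard zlib streaming pattern: keep compressing while input remains and drain the output buffer whenever avail_out reaches zero (the deflate() call itself sits in one of the elided lines). A self-contained sketch of that loop, with an illustrative sink callback standing in for png_write_image_data():

#include <zlib.h>

/* Sketch: push one buffer through an already-initialized z_stream whose
 * next_out/avail_out fields point at `out`, handing every filled output
 * buffer to sink(). */
static int deflate_buffer(z_stream *zs, const unsigned char *in, unsigned len,
                          unsigned char *out, unsigned out_size,
                          void (*sink)(const unsigned char *, unsigned))
{
    zs->next_in  = (unsigned char *)in;
    zs->avail_in = len;
    while (zs->avail_in > 0) {
        if (deflate(zs, Z_NO_FLUSH) != Z_OK)
            return -1;
        if (zs->avail_out == 0) {            /* output buffer full: flush it */
            sink(out, out_size);
            zs->next_out  = out;
            zs->avail_out = out_size;
        }
    }
    return 0;
}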
#define PNG_LRINT(d, divisor) lrint((d) * (divisor))
#define PNG_Q2D(q, divisor)   PNG_LRINT(av_q2d(q), (divisor))
#define AV_WB32_PNG_D(buf, q) AV_WB32(buf, PNG_Q2D(q, 100000))
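PNG stores gamma and chromaticity values as unsigned 32-bit integers scaled by 100000, which is what AV_WB32_PNG_D() encodes. A small worked check of the scaling for the BT.709 white point x coordinate 0.3127, using the public libavutil rational helpers (the main() wrapper is only for illustration):

#include <math.h>
#include <stdio.h>
#include <libavutil/rational.h>

int main(void)
{
    AVRational x = av_make_q(3127, 10000);   /* white point x = 0.3127 */
    long v = lrint(av_q2d(x) * 100000);      /* same math as PNG_Q2D(x, 100000) */
    printf("%ld\n", v);                      /* prints 31270 */
    return 0;
}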
/* png_write_iccp(): write AV_FRAME_DATA_ICC_PROFILE side data as an iCCP
 * chunk: a profile name of at most 79 characters, a terminating zero, the
 * compression method byte, then the zlib-compressed profile (excerpt). */
static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
{
    z_stream *const zstream = &s->zstream.zstream;
    /* ... */
    uint8_t *start, *buf;
    /* ... */
    if (!sd || !sd->size)
        /* ... */
    zstream->next_in  = sd->data;
    zstream->avail_in = sd->size;
    /* ... */
    start = s->bytestream + 8;
    /* ... */
    for (int i = 0;; i++) {
        char c = (i == 79) ? 0 : name[i];
        bytestream_put_byte(&buf, c);
        /* ... */
    bytestream_put_byte(&buf, 0);
    zstream->next_out  = buf;
    zstream->avail_out = s->bytestream_end - buf;
    /* ... */
    deflateReset(zstream);
    if (ret != Z_STREAM_END)
        /* ... */
    /* ... */ zstream->next_out - start);
/* encode_headers(): fill in the IHDR fields and write the ancillary header
 * chunks (pHYs, sTER, gAMA/cHRM, iCCP, ...) before the image data
 * (excerpt). */
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
{
    /* ... */
    s->buf[8]  = s->bit_depth;        /* IHDR byte 8: bit depth */
    s->buf[9]  = s->color_type;       /* IHDR byte 9: colour type */
    /* ... */
    s->buf[12] = s->is_progressive;   /* IHDR byte 12: interlace method */
    /* ... */
    switch (stereo3d->type) {
    /* ... */
        av_log(avctx, AV_LOG_WARNING,
               "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
        /* ... */
    } else if (ret < 0) {
        /* ... */
    for (int i = 0; i < 3; i++) {
/* Palettized input: split the 256-entry packed ARGB palette in data[1] into
 * a PLTE chunk (RGB triplets) and a tRNS chunk (alpha bytes) (excerpt). */
    uint8_t *ptr, *alpha_ptr;
    /* ... */
    palette   = (uint32_t *)pict->data[1];
    /* ... */
    alpha_ptr = s->buf + 256 * 3;
    /* ... */
    for (i = 0; i < 256; i++) {
        /* ... */
        *alpha_ptr++ = alpha;
        bytestream_put_be24(&ptr, v);
    }
    png_write_chunk(&s->bytestream, MKTAG('P', 'L', 'T', 'E'),
                    s->buf, 256 * 3);
    png_write_chunk(&s->bytestream, MKTAG('t', 'R', 'N', 'S'),
                    s->buf + 256 * 3, 256);
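Each AV_PIX_FMT_PAL8 palette entry is a packed 32-bit ARGB value (an AV_PIX_FMT_RGB32 palette, with alpha in the top byte, as the >> 24 tests elsewhere in this file show), so splitting it for PLTE and tRNS is plain shifting. A small illustrative helper, not the loop body used above:

#include <stdint.h>

/* Sketch: split one packed ARGB palette entry into PLTE and tRNS bytes. */
static void split_palette_entry(uint32_t entry, uint8_t rgb[3], uint8_t *alpha)
{
    *alpha = entry >> 24;   /* tRNS byte */
    rgb[0] = entry >> 16;   /* R */
    rgb[1] = entry >> 8;    /* G */
    rgb[2] = entry;         /* B */
}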
/* encode_frame(): filter and deflate the picture rows, in Adam7 pass order
 * when interlacing is enabled and top to bottom otherwise, then flush the
 * remaining zlib output as image-data chunks (excerpt). */
    z_stream *const zstream = &s->zstream.zstream;
    /* ... */
    int row_size, pass_row_size;
    uint8_t *crow_buf, *crow;
    uint8_t *crow_base       = NULL;
    uint8_t *progressive_buf = NULL;
    uint8_t *top_buf         = NULL;

    row_size = (pict->width * s->bits_per_pixel + 7) >> 3;
    /* ... */
    crow_buf = crow_base + 15;
    if (s->is_progressive) {
        progressive_buf = av_malloc(row_size + 1);
        /* ... */
        if (!progressive_buf || !top_buf) {
            /* ... */
    zstream->next_out = s->buf;
    if (s->is_progressive) {
        /* ... */
        for (pass = 0; pass < NB_PASSES; pass++) {
            /* ... */
            if (pass_row_size > 0) {
                /* ... */
                for (y = 0; y < pict->height; y++)
                    /* ... */
                    const uint8_t *ptr = p->data[0] + y * p->linesize[0];
                    FFSWAP(uint8_t *, progressive_buf, top_buf);
                    /* ... */ s->bits_per_pixel, pass,
                    /* ... */ top, pass_row_size, s->bits_per_pixel >> 3);
                    /* ... */
                    top = progressive_buf;
    } else {
        const uint8_t *top = NULL;
        for (y = 0; y < pict->height; y++) {
            const uint8_t *ptr = p->data[0] + y * p->linesize[0];
            /* ... */ row_size, s->bits_per_pixel >> 3);
            /* ... */
    /* ... */
    if (ret == Z_OK || ret == Z_STREAM_END) {
        /* ... */
        if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
            /* ... */
        zstream->next_out = s->buf;
        if (ret == Z_STREAM_END)
            /* ... */
    /* ... */
    deflateReset(zstream);
/* add_icc_profile_size(): grow the worst-case packet size by the deflate
 * bound of the ICC profile plus a header allowance, with overflow checks
 * (excerpt). */
static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict,
                                uint64_t *max_packet_size)
{
    /* ... */
    const int hdr_size = 128;
    uint64_t new_pkt_size;
    /* ... */
    if (!sd || !sd->size)
        /* ... */
    bound = deflateBound(&s->zstream.zstream, sd->size);
    if (bound > INT32_MAX - hdr_size)
        /* ... */
    new_pkt_size = *max_packet_size + bound + hdr_size;
    if (new_pkt_size < *max_packet_size)
        /* ... */
    *max_packet_size = new_pkt_size;
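deflateBound() is the zlib call that returns an upper bound on the compressed size of a given amount of input for the stream's current parameters, which is what makes these worst-case packet-size sums safe to allocate from. A standalone sketch of the same overflow-checked accumulation (the helper and its names are illustrative):

#include <stdint.h>
#include <zlib.h>

/* Sketch: add the worst-case compressed size of `len` input bytes plus a
 * fixed header allowance to *total, failing on unsigned wrap-around. */
static int add_compressed_bound(z_stream *zs, uint64_t len, uint64_t hdr,
                                uint64_t *total)
{
    uLong bound = deflateBound(zs, (uLong)len);
    uint64_t next = *total + bound + hdr;
    if (next < *total)
        return -1;
    *total = next;
    return 0;
}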
/* add_exif_profile_size(): reserve room for an eXIf chunk based on the
 * AV_FRAME_DATA_EXIF side data size plus a fixed base allowance and the
 * 12 bytes of chunk framing (excerpt). */
static int add_exif_profile_size(AVCodecContext *avctx, const AVFrame *pict,
                                 uint64_t *max_packet_size)
{
    /* ... */
    uint64_t new_pkt_size;
    /* ... */
    const int base_exif_size = 92;
    uint64_t estimated_exif_size;
    /* ... */
    estimated_exif_size = sd ? sd->size : 0;
    /* ... */
    estimated_exif_size += base_exif_size;
    /* ... */
    if (!estimated_exif_size)
        /* ... */
    new_pkt_size = *max_packet_size + estimated_exif_size + 12;
    if (new_pkt_size < *max_packet_size)
        /* ... */
    *max_packet_size = new_pkt_size;
/* encode_png(): encode callback of the PNG encoder: compute a worst-case
 * packet size, write the signature, headers and image data, then set the
 * final packet size (excerpt). */
static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
                      const AVFrame *pict, int *got_packet)
{
    /* ... */
    uint64_t max_packet_size;
    /* ... */
    enc_row_size = deflateBound(&s->zstream.zstream,
                                (avctx->width * s->bits_per_pixel + 7) >> 3);
    /* ... */
    s->bytestream_start =
        /* ... */
    pkt->size = s->bytestream - s->bytestream_start;
/* apng_do_inverse_blend(): prepare a reduced frame for APNG_BLEND_OP_OVER:
 * find the bounding box of pixels that changed since the previous output,
 * make unchanged pixels inside it fully transparent, and give up when a
 * changed pixel cannot be reproduced by an over blend (excerpt). */
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
                                 APNGFctlChunk *fctl_chunk, uint8_t bpp)
{
    /* ... */
    unsigned int rightmost_x  = 0;
    /* ... */
    unsigned int bottommost_y = 0;
    /* ... */
    ptrdiff_t input_linesize  = input->linesize[0];
    ptrdiff_t output_linesize = output->linesize[0];
    /* ... */
            if (x >= rightmost_x)
                /* ... */
            if (y >= bottommost_y)
                bottommost_y = y + 1;
    /* ... */
    if (leftmost_x == input->width && rightmost_x == 0) {
        /* ... */
        leftmost_x  = topmost_y    = 0;
        rightmost_x = bottommost_y = 1;
    }
    /* ... */
    for (y = topmost_y; y < bottommost_y; ++y) {
        /* ... */ input->data[0] + input_linesize * y + bpp * leftmost_x,
                  bpp * (rightmost_x - leftmost_x));
    /* ... */
    size_t transparent_palette_index;
    /* ... */
    switch (input->format) {
    /* ... */
        palette = (uint32_t *)input->data[1];
        for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
            if (palette[transparent_palette_index] >> 24 == 0)
                /* ... */
    /* ... */
    for (y = topmost_y; y < bottommost_y; ++y) {
        const uint8_t *foreground = input->data[0]  + input_linesize  * y + bpp * leftmost_x;
        uint8_t *background       = output->data[0] + output_linesize * y + bpp * leftmost_x;
        /* ... */
        for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
            if (!memcmp(foreground, background, bpp)) {
                /* ... */
                if (transparent_palette_index == 256) {
                    /* ... */
            switch (input->format) {
            /* ... */
                if (((uint16_t *)foreground)[3] == 0xffff ||
                    ((uint16_t *)background)[3] == 0)
                    /* ... */
                if (((uint16_t *)foreground)[1] == 0xffff ||
                    ((uint16_t *)background)[1] == 0)
                    /* ... */
                if (foreground[3] == 0xff || background[3] == 0)
                    /* ... */
                if (foreground[1] == 0xff || background[1] == 0)
                    /* ... */
                if (palette[*foreground] >> 24 == 0xff ||
                    palette[*background] >> 24 == 0)
                    /* ... */
    /* ... */
    output->width  = rightmost_x - leftmost_x;
    output->height = bottommost_y - topmost_y;
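The format switch near the end encodes the condition under which an over blend can reproduce a changed pixel exactly: either the new pixel is fully opaque (it simply replaces whatever is underneath) or the old pixel is fully transparent (there is nothing to blend against). When neither holds the function has to give up so the caller can fall back to plain APNG_BLEND_OP_SOURCE coding. A compact restatement of that predicate for 8-bit RGBA (illustrative only):

#include <stdint.h>

/* Sketch: can this RGBA8 pixel be stored unchanged in an "over"-blended
 * APNG delta frame and still give the intended result? */
static int over_blend_is_exact(const uint8_t fg[4], const uint8_t bg[4])
{
    return fg[3] == 0xff    /* new pixel fully opaque */
        || bg[3] == 0;      /* old pixel fully transparent */
}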
/* apng_encode_frame(): encode the frame under the candidate dispose/blend
 * configurations, keep whichever attempt produced the smallest bytestream,
 * and restore the bytestream pointers and sequence number around each
 * attempt (excerpt). */
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
                             APNGFctlChunk *best_fctl_chunk,
                             APNGFctlChunk *best_last_fctl_chunk)
{
    uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
    uint8_t *original_bytestream, *original_bytestream_end;
    uint8_t *temp_bytestream = 0, *temp_bytestream_end;
    uint32_t best_sequence_number;
    uint8_t *best_bytestream;
    size_t best_bytestream_size = SIZE_MAX;
    /* ... */
    original_bytestream     = s->bytestream;
    original_bytestream_end = s->bytestream_end;
    /* ... */
    temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
    if (!temp_bytestream) {
        /* ... */
    temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
    /* ... */
        uint32_t original_sequence_number = s->sequence_number, sequence_number;
        uint8_t *bytestream_start = s->bytestream;
        size_t bytestream_size;
        /* ... */
                size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
                memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
        /* ... */
        sequence_number    = s->sequence_number;
        s->sequence_number = original_sequence_number;
        bytestream_size    = s->bytestream - bytestream_start;
        s->bytestream      = bytestream_start;
        /* ... */
        if (bytestream_size < best_bytestream_size) {
            *best_fctl_chunk      = fctl_chunk;
            *best_last_fctl_chunk = last_fctl_chunk;
            /* ... */
            best_sequence_number = sequence_number;
            best_bytestream      = s->bytestream;
            best_bytestream_size = bytestream_size;
            /* ... */
            if (best_bytestream == original_bytestream) {
                s->bytestream     = temp_bytestream;
                s->bytestream_end = temp_bytestream_end;
            } else {
                s->bytestream     = original_bytestream;
                s->bytestream_end = original_bytestream_end;
            }
        /* ... */
    s->sequence_number = best_sequence_number;
    s->bytestream      = original_bytestream + best_bytestream_size;
    s->bytestream_end  = original_bytestream_end;
    if (best_bytestream != original_bytestream)
        memcpy(original_bytestream, best_bytestream, best_bytestream_size);
/* encode_apng(): APNG encode callback. The first call writes the PNG/APNG
 * headers into extradata and allocates a buffer for the delayed frame; each
 * later call emits the previously buffered frame (the encoder runs one frame
 * behind so the buffered frame's fcTL fields can account for the following
 * input) and encodes the current frame into s->last_frame_packet
 * (excerpt). */
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
                       const AVFrame *pict, int *got_packet)
{
    /* ... */
    uint64_t max_packet_size;
    /* ... */
        s->palette_checksum = checksum;
    } else if (checksum != s->palette_checksum) {
        /* ... */
               "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
        /* ... */
    enc_row_size = deflateBound(&s->zstream.zstream,
                                (avctx->width * s->bits_per_pixel + 7) >> 3);
    /* ... */
    if (max_packet_size > INT_MAX)
        /* ... */
    if (extradata_size > SIZE_MAX)
        /* ... */
    s->bytestream = s->extra_data = av_malloc(extradata_size);
    /* ... */
    s->extra_data_size = s->bytestream - s->extra_data;
    /* ... */
    s->last_frame_packet = av_malloc(max_packet_size);
    if (!s->last_frame_packet)
        /* ... */
    } else if (s->last_frame) {
        /* ... */
        memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
        pkt->pts = s->last_frame->pts;
        /* ... */
    s->bytestream_start =
    s->bytestream       = s->last_frame_packet;
    s->bytestream_end   = s->bytestream + max_packet_size;
    /* ... */
    ++s->sequence_number;
    /* ... */
    if (s->last_frame) {
        uint8_t *last_fctl_chunk_start = pkt->data;
        /* ... */
        if (!s->extra_data_updated) {
            /* ... */
            memcpy(side_data, s->extra_data, s->extra_data_size);
            s->extra_data_updated = 1;
        }
        /* ... */
        AV_WB32(buf + 0,  s->last_frame_fctl.sequence_number);
        AV_WB32(buf + 4,  s->last_frame_fctl.width);
        AV_WB32(buf + 8,  s->last_frame_fctl.height);
        AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
        AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
        AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
        AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
        buf[24] = s->last_frame_fctl.dispose_op;
        buf[25] = s->last_frame_fctl.blend_op;
        /* ... */
    if (!s->last_frame) {
        /* ... */
    if (!s->prev_frame) {
        /* ... */
    s->prev_frame->format = pict->format;
    s->prev_frame->width  = pict->width;
    s->prev_frame->height = pict->height;
    /* ... */
        uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
        for (y = s->last_frame_fctl.y_offset;
             y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
            size_t row_start = s->prev_frame->linesize[0] * y +
                               bpp * s->last_frame_fctl.x_offset;
            memset(s->prev_frame->data[0] + row_start, 0,
                   bpp * s->last_frame_fctl.width);
        }
    /* ... */
    s->last_frame_fctl        = fctl_chunk;
    s->last_frame_packet_size = s->bytestream - s->bytestream_start;
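The AV_WB32/AV_WB16 sequence above fills the 26-byte fcTL payload in the order defined by the APNG specification: sequence number, width, height, x/y offset, delay numerator and denominator, dispose op, blend op. A sketch of the same serialization as a standalone helper, using libavutil's AV_WB32/AV_WB16 and an illustrative struct that only mirrors the fields written here (the real APNGFctlChunk lives in libavcodec and may differ):

#include <stdint.h>
#include <libavutil/intreadwrite.h>

struct fctl_fields {
    uint32_t sequence_number, width, height, x_offset, y_offset;
    uint16_t delay_num, delay_den;
    uint8_t  dispose_op, blend_op;
};

/* Sketch: serialize a 26-byte fcTL payload into buf. */
static void write_fctl_payload(uint8_t buf[26], const struct fctl_fields *f)
{
    AV_WB32(buf + 0,  f->sequence_number);
    AV_WB32(buf + 4,  f->width);
    AV_WB32(buf + 8,  f->height);
    AV_WB32(buf + 12, f->x_offset);
    AV_WB32(buf + 16, f->y_offset);
    AV_WB16(buf + 20, f->delay_num);
    AV_WB16(buf + 22, f->delay_den);
    buf[24] = f->dispose_op;
    buf[25] = f->blend_op;
}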
/* png_enc_init(): reconcile the dpi/dpm options (dpi is converted to dots
 * per meter), choose the zlib compression level and reset the per-stream
 * state (excerpt). */
static av_cold int png_enc_init(AVCodecContext *avctx)
{
    /* ... */
    int compression_level;
    /* ... */
    if (s->dpi && s->dpm) {
        /* ... */
    } else if (s->dpi) {
        s->dpm = s->dpi * 10000 / 254;
    }
    /* ... */
    compression_level = /* avctx->compression_level left at FF_COMPRESSION_DEFAULT */
                            ? Z_DEFAULT_COMPRESSION
                            : /* ... */;
    /* ... */
    s->extra_data_size = 0;
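Since one inch is exactly 25.4 mm, dots per inch convert to dots per meter by multiplying by 10000/254; for example, a dpi of 300 becomes 300 * 10000 / 254 = 11811 dots per meter (the integer division drops the fractional remainder of the exact 11811.02).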
/* Option table of the encoder (excerpt of static const AVOption options[]). */
#define OFFSET(x) offsetof(PNGEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
    /* ... */
    { "dpm", "Set image resolution (in dots per meter)", OFFSET(dpm),
      AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 0x10000, VE },
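Private encoder options declared this way are set through the AVOptions API on the codec context before it is opened; a minimal sketch, assuming only that the "dpm" option shown above is available on the PNG encoder context:

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

/* Sketch: set the PNG encoder's "dpm" option (dots per meter) before
 * avcodec_open2(). AV_OPT_SEARCH_CHILDREN lets the lookup reach the
 * encoder's private context. */
static int set_png_density(AVCodecContext *enc, int64_t dots_per_meter)
{
    return av_opt_set_int(enc, "dpm", dots_per_meter, AV_OPT_SEARCH_CHILDREN);
}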
enum AVColorTransferCharacteristic color_trc
int ff_encode_reordered_opaque(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *frame)
Propagate user opaque values from the frame to avctx/pkt as needed.
#define CODEC_PIXFMTS(...)
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
#define AV_LOG_WARNING
Something somehow does not look correct.
enum AVColorRange color_range
MPEG vs JPEG YUV range.
@ AVALPHA_MODE_STRAIGHT
Alpha channel is independent of color values.
AVColorTransferCharacteristic
Color Transfer Characteristic.
const FFCodec ff_png_encoder
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Struct that contains both white point location and primaries location, providing the complete descrip...
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
@ AV_PKT_DATA_NEW_EXTRADATA
The AV_PKT_DATA_NEW_EXTRADATA is used to notify the codec or the format that the extradata buffer was...
#define APNG_FCTL_CHUNK_SIZE
uint8_t * data
The data buffer.
int ff_png_get_nb_channels(int color_type)
enum AVColorPrimaries color_primaries
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict, APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
unsigned MaxCLL
Max content light level (cd/m^2).
This structure describes decoded (raw) audio or video data.
@ AV_PIX_FMT_RGBA64BE
packed RGBA 16:16:16:16, 64bpp, 16R, 16G, 16B, 16A, the 2-byte value for each R/G/B/A component is st...
#define PNG_FILTER_VALUE_MIXED
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
static int output_data(MLPDecodeContext *m, unsigned int substr, AVFrame *frame, int *got_frame_ptr)
Write the audio data into the output buffer.
int dpm
Physical pixel density, in dots per meter, if set.
@ AV_FRAME_DATA_DISPLAYMATRIX
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
int64_t duration
Duration of this packet in AVStream->time_base units, 0 if unknown.
static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
uint8_t * last_frame_packet
AVColorPrimaries
Chromaticity coordinates of the source primaries.
void ff_deflate_end(FFZStream *zstream)
Wrapper around deflateEnd().
#define FF_COMPRESSION_DEFAULT
@ APNG_DISPOSE_OP_BACKGROUND
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
#define FF_INPUT_BUFFER_MIN_SIZE
Used by some encoders as upper bound for the length of headers.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Content light level needed to transmit HDR over HDMI (CTA-861.3).
const FFCodec ff_apng_encoder
static void sub_png_paeth_prediction(uint8_t *dst, const uint8_t *src, const uint8_t *top, int w, int bpp)
@ AV_PIX_FMT_GRAY16BE
Y , 16bpp, big-endian.
static av_cold void close(AVCodecParserContext *s)
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
AVCodec p
The public AVCodec.
@ AVCOL_TRC_IEC61966_2_1
IEC 61966-2-1 (sRGB or sYCC)
int ff_png_pass_row_size(int pass, int bits_per_pixel, int width)
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
int flags
AV_CODEC_FLAG_*.
#define FF_CODEC_ENCODE_CB(func)
static int encode_png(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
#define PNG_COLOR_TYPE_RGB_ALPHA
#define AV_CODEC_FLAG_INTERLACED_DCT
Use interlaced DCT.
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type, const uint8_t *src, const uint8_t *top, int size, int bpp)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt, const AVFrame *pict, int *got_packet)
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
const AVColorPrimariesDesc * av_csp_primaries_desc_from_id(enum AVColorPrimaries prm)
Retrieves a complete gamut description from an enum constant describing the color primaries.
static void png_write_chunk(uint8_t **f, uint32_t tag, const uint8_t *buf, int length)
#define PNG_COLOR_TYPE_RGB
#define AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE
This encoder can reorder user opaque values from input AVFrames and return them with corresponding ou...
#define av_assert0(cond)
assert() equivalent, that is always enabled.
int bits_per_raw_sample
Bits per sample/pixel of internal libavcodec pixel/sample format.
#define PNG_Q2D(q, divisor)
static void png_write_image_data(AVCodecContext *avctx, const uint8_t *buf, int length)
#define CODEC_LONG_NAME(str)
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
int flags
Additional information about the frame packing.
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
@ AV_PIX_FMT_GRAY8A
alias for AV_PIX_FMT_YA8
#define LIBAVUTIL_VERSION_INT
Describe the class of an AVClass context structure.
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
@ AV_EXIF_TIFF_HEADER
The TIFF header starts with 0x49492a00, or 0x4d4d002a.
#define av_unreachable(msg)
Asserts that are used as compiler optimization hints depending upon ASSERT_LEVEL and NBDEBUG.
@ AV_PIX_FMT_MONOBLACK
Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb.
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
const char * av_default_item_name(void *ptr)
Return the context name.
static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input, APNGFctlChunk *fctl_chunk, uint8_t bpp)
static av_cold int png_enc_close(AVCodecContext *avctx)
@ AV_FRAME_DATA_ICC_PROFILE
The data contains an ICC profile as an opaque octet buffer following the format described by ISO 1507...
@ APNG_DISPOSE_OP_PREVIOUS
#define PNG_COLOR_TYPE_GRAY
static void deflate(uint8_t *dst, const uint8_t *p1, int width, int threshold, const uint8_t *coordinates[], int coord, int maxc)
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
const uint8_t ff_png_pass_ymask[NB_PASSES]
static int add_exif_profile_size(AVCodecContext *avctx, const AVFrame *pict, uint64_t *max_packet_size)
av_cold void ff_llvidencdsp_init(LLVidEncDSPContext *c)
static int add_icc_profile_size(AVCodecContext *avctx, const AVFrame *pict, uint64_t *max_packet_size)
AVAlphaMode
Correlation between the alpha channel and color values.
#define PNG_FILTER_VALUE_NONE
int(* init)(AVBSFContext *ctx)
@ AV_PIX_FMT_RGB24
packed RGB 8:8:8, 24bpp, RGBRGB...
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
@ AV_PIX_FMT_YA16BE
16 bits gray, 16 bits alpha (big-endian)
size_t last_frame_packet_size
#define PNG_FILTER_VALUE_AVG
double av_csp_approximate_trc_gamma(enum AVColorTransferCharacteristic trc)
Determine a suitable 'gamma' value to match the supplied AVColorTransferCharacteristic.
#define MKBETAG(a, b, c, d)
LLVidEncDSPContext llvidencdsp
#define PNG_FILTER_VALUE_PAETH
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
static uint8_t * png_choose_filter(PNGEncContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *top, int size, int bpp)
#define PNG_FILTER_VALUE_UP
const AVCRC * av_crc_get_table(AVCRCId crc_id)
Get an initialized standard CRC table.
#define AVERROR_EXTERNAL
Generic error in an external library.
int flags
A combination of AV_PKT_FLAG values.
#define AV_STEREO3D_FLAG_INVERT
Inverted views, Right/Bottom represents the left view.
size_t size
Size of data in bytes.
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
#define PNG_FILTER_VALUE_SUB
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
@ AV_PIX_FMT_RGB48BE
packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big...
#define i(width, name, range_min, range_max)
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
static const AVOption options[]
@ AV_FRAME_DATA_STEREO3D
Stereoscopic 3d metadata.
const char * name
Name of the codec implementation.
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
#define FF_CODEC_CAP_ICC_PROFILES
Codec supports embedded ICC profiles (AV_FRAME_DATA_ICC_PROFILE).
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
@ AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
int64_t frame_num
Frame counter, set by libavcodec.
static double bound(const double threshold, const double val)
static const float pred[4]
#define FFSWAP(type, a, b)
@ AVALPHA_MODE_UNSPECIFIED
Unknown alpha handling, or no alpha channel.
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
enum AVStereo3DType type
How views are packed within the video.
#define PNG_LRINT(d, divisor)
uint8_t * bytestream_start
#define AV_INPUT_BUFFER_PADDING_SIZE
int av_frame_replace(AVFrame *dst, const AVFrame *src)
Ensure the destination frame refers to the same data described by the source frame,...
main external API structure.
uint8_t * av_packet_new_side_data(AVPacket *pkt, enum AVPacketSideDataType type, size_t size)
Allocate new information of a packet.
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
uint32_t av_crc(const AVCRC *ctx, uint32_t crc, const uint8_t *buffer, size_t length)
Calculate the CRC of a block.
@ AV_OPT_TYPE_INT
Underlying C type is int.
static void png_get_interlaced_row(uint8_t *dst, int row_size, int bits_per_pixel, int pass, const uint8_t *src, int width)
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
#define PNG_COLOR_MASK_PALETTE
#define AV_WB32_PNG_D(buf, q)
APNGFctlChunk last_frame_fctl
int dpi
Physical pixel density, in dots per inch, if set.
A reference to a data buffer.
Structure to hold side data for an AVFrame.
static av_cold int png_enc_init(AVCodecContext *avctx)
static int png_write_iccp(PNGEncContext *s, const AVFrameSideData *sd)
static const int16_t alpha[]
This structure stores compressed data.
unsigned MaxFALL
Max average light level per frame (cd/m^2).
int ff_exif_get_buffer(void *logctx, const AVFrame *frame, AVBufferRef **buffer_ptr, enum AVExifHeaderMode header_mode)
Gets all relevant side data, collects it into an IFD, and writes it into the corresponding buffer poi...
int width
picture width / height.
@ AV_FRAME_DATA_EXIF
Extensible image file format metadata.
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
#define PNG_COLOR_TYPE_GRAY_ALPHA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
#define MKTAG(a, b, c, d)
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
static void input_data(MLPEncodeContext *ctx, MLPSubstream *s, uint8_t **const samples, int nb_samples)
Wrapper function for inputting data in two different bit-depths.
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
int ff_alloc_packet(AVCodecContext *avctx, AVPacket *avpkt, int64_t size)
Check AVPacket size and allocate data.
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
uint32_t palette_checksum
#define PNG_COLOR_TYPE_PALETTE
int ff_deflate_init(FFZStream *zstream, int level, void *logctx)
Wrapper around deflateInit().
static const AVClass pngenc_class