  24  #define X265_API_IMPORTS 1
  63      case NAL_UNIT_CODED_SLICE_BLA_W_LP:
  64      case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
  65      case NAL_UNIT_CODED_SLICE_BLA_N_LP:
  66      case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
  67      case NAL_UNIT_CODED_SLICE_IDR_N_LP:
  68      case NAL_UNIT_CODED_SLICE_CRA:
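The case labels above belong to the wrapper's keyframe test; a minimal reconstruction of that helper, assuming only x265.h's NalUnitType enum, could look like this:

#include <x265.h>

/* Reconstruction for illustration: nonzero for NAL types that start a new
 * coded video sequence (IDR, CRA and BLA slices). */
static int is_keyframe(NalUnitType naltype)
{
    switch (naltype) {
    case NAL_UNIT_CODED_SLICE_BLA_W_LP:
    case NAL_UNIT_CODED_SLICE_BLA_W_RADL:
    case NAL_UNIT_CODED_SLICE_BLA_N_LP:
    case NAL_UNIT_CODED_SLICE_IDR_W_RADL:
    case NAL_UNIT_CODED_SLICE_IDR_N_LP:
    case NAL_UNIT_CODED_SLICE_CRA:
        return 1;
    default:
        return 0;
    }
}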
  93      snprintf(buf, sizeof(buf), "%2.2f", value);
  94      if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {
 108      snprintf(buf, sizeof(buf), "%d", value);
 109      if (ctx->api->param_parse(ctx->params, key, buf) == X265_PARAM_BAD_VALUE) {
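The two snprintf/param_parse fragments above show the same pattern: numeric options are printed into a small buffer and routed through x265's string-based parser. A minimal sketch of that pattern, where set_int_option is a hypothetical name and only the x265_api calls are real API:

#include <stdio.h>
#include <x265.h>

/* Hypothetical helper: forward an integer option to x265 through the
 * version-selected API table.  param_parse() reports failures with
 * X265_PARAM_BAD_NAME or X265_PARAM_BAD_VALUE. */
static int set_int_option(const x265_api *api, x265_param *params,
                          const char *key, int value)
{
    char buf[64];

    snprintf(buf, sizeof(buf), "%d", value);
    return api->param_parse(params, key, buf);
}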
 125      ctx->api = x265_api_get(0);
 138          for (i = 0; x265_preset_names[i]; i++)
 143          for (i = 0; x265_tune_names[i]; i++)
 165      if (ctx->params->sourceWidth < 64 || ctx->params->sourceHeight < 64)
 166          ctx->params->maxCUSize = 32;
 167      if (ctx->params->sourceWidth < 32 || ctx->params->sourceHeight < 32)
 168          ctx->params->maxCUSize = 16;
 169      if (ctx->params->sourceWidth < 16 || ctx->params->sourceHeight < 16) {
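A compact way to read the three checks above: the maximum CTU size is stepped down so at least one full CTU fits in the picture, and anything smaller than 16x16 is rejected. A sketch of that decision as a standalone helper (pick_max_cu_size is a hypothetical name, not part of the wrapper):

/* Hypothetical helper mirroring the checks above: returns the largest CTU size
 * (64, 32 or 16) that fits the picture, or 0 for dimensions x265 cannot encode. */
static int pick_max_cu_size(int width, int height)
{
    if (width < 16 || height < 16)
        return 0;            /* too small to encode at all */
    if (width < 32 || height < 32)
        return 16;
    if (width < 64 || height < 64)
        return 32;
    return 64;               /* x265 default CTU size */
}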
 176      ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1;
 190          ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
 195  #if X265_BUILD >= 159
 197          ctx->params->preferredTransferCharacteristics = ctx->params->vui.transferCharacteristics;
 204          int sar_num, sar_den;
 209          snprintf(sar, sizeof(sar), "%d:%d", sar_num, sar_den);
 210          if (ctx->api->param_parse(ctx->params, "sar", sar) == X265_PARAM_BAD_VALUE) {
 220          ctx->params->internalCsp = X265_CSP_I420;
 225          ctx->params->internalCsp = X265_CSP_I422;
 231          ctx->params->vui.bEnableVideoSignalTypePresentFlag = 1;
 232          ctx->params->vui.bEnableColorDescriptionPresentFlag = 1;
 236          ctx->params->internalCsp = X265_CSP_I444;
 241          if (ctx->api->api_build_number < 85) {
 243                     "libx265 version is %d, must be at least 85 for gray encoding.\n",
 244                     ctx->api->api_build_number);
 247          ctx->params->internalCsp = X265_CSP_I400;
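The internalCsp assignments above come from a switch over the input pixel format. One way to express that mapping, relying only on the chroma layout reported by av_pix_fmt_desc_get(), is sketched below; pix_fmt_to_csp is a hypothetical helper, not the wrapper's own switch:

#include <x265.h>
#include <libavutil/pixdesc.h>

/* Hypothetical helper: derive x265's internalCsp from the chroma subsampling
 * of an FFmpeg pixel format. */
static int pix_fmt_to_csp(enum AVPixelFormat pix_fmt)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);

    if (!desc)
        return -1;
    if (desc->nb_components == 1)
        return X265_CSP_I400;                                  /* grayscale */
    if (desc->log2_chroma_w == 1 && desc->log2_chroma_h == 1)
        return X265_CSP_I420;                                  /* 4:2:0 */
    if (desc->log2_chroma_w == 1 && desc->log2_chroma_h == 0)
        return X265_CSP_I422;                                  /* 4:2:2 */
    return X265_CSP_I444;                                      /* 4:4:4 */
}

As the fragment above shows, the grayscale path is additionally gated on api_build_number being at least 85.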
 255          if (ctx->api->param_parse(ctx->params, "crf", crf) == X265_PARAM_BAD_VALUE) {
 261          ctx->params->rc.rateControlMode = X265_RC_ABR;
 262      } else if (ctx->cqp >= 0) {
 269      if (avctx->qmin >= 0) {
 274      if (avctx->qmax >= 0) {
 285      if (avctx->qblur >= 0) {
 317          ctx->params->bRepeatHeaders = 1;
 334      if (avctx->refs >= 0) {
 346          case X265_PARAM_BAD_NAME:
 348                     "Unknown option: %s.\n", en->key);
 350          case X265_PARAM_BAD_VALUE:
 352                     "Invalid value for %s: %s.\n", en->key, en->value);
 361          ctx->params->rc.vbvBufferInit == 0.9) {
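The vbvBufferInit fragment above is the tail end of the VBV setup, which translates the AVCodecContext rate-control fields (in bits) into x265's kbit-based fields. A sketch of that translation, under the assumption that the initial-occupancy override only applies when the caller actually supplied one:

#include <x265.h>
#include <libavcodec/avcodec.h>

/* Sketch, not the wrapper itself: map FFmpeg VBV settings (bits) onto x265's
 * rate-control block (kbit / kbit/s, with vbvBufferInit as a 0..1 fraction). */
static void set_vbv(x265_param *params, const AVCodecContext *avctx)
{
    params->rc.vbvMaxBitrate = avctx->rc_max_rate    / 1000;
    params->rc.vbvBufferSize = avctx->rc_buffer_size / 1000;

    if (params->rc.vbvBufferSize && avctx->rc_initial_buffer_occupancy > 1000)
        params->rc.vbvBufferInit = (double)avctx->rc_initial_buffer_occupancy /
                                   avctx->rc_buffer_size;
}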
 370          for (i = 0; x265_profile_names[i]; i++)
 398                 "Cannot allocate HEVC header of size %d.\n", avctx->extradata_size);
 414      if (ctx->params->rc.aqMode == X265_AQ_NONE) {
 417              av_log(ctx, AV_LOG_WARNING, "Adaptive quantization must be enabled to use ROI encoding, skipping ROI.\n");
 421          int mb_size = (ctx->params->rc.qgSize == 8) ? 8 : 16;
 422          int mbx     = (frame->width  + mb_size - 1) / mb_size;
 423          int mby     = (frame->height + mb_size - 1) / mb_size;
 424          int qp_range = 51 + 6 * (pic->bitDepth - 8);
 432          if (!roi_size || sd->size % roi_size != 0) {
 436          nb_rois = sd->size / roi_size;
 444          for (int i = nb_rois - 1; i >= 0; i--) {
 445              int startx, endx, starty, endy;
 450              starty = FFMIN(mby, roi->top / mb_size);
 451              endy   = FFMIN(mby, (roi->bottom + mb_size - 1) / mb_size);
 452              startx = FFMIN(mbx, roi->left / mb_size);
 453              endx   = FFMIN(mbx, (roi->right + mb_size - 1) / mb_size);
 461              qoffset = av_clipf(qoffset * qp_range, -qp_range, +qp_range);
 463              for (int y = starty; y < endy; y++)
 464                  for (int x = startx; x < endx; x++)
 465                      qoffsets[x + y*mbx] = qoffset;
 468          pic->quantOffsets = qoffsets;
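Putting the arithmetic above together: each AVRegionOfInterest is converted to a rectangle of quantiser-group blocks, and its rational qoffset is scaled to the codec's QP range before being written into the per-block offset array. A self-contained sketch of that step; apply_roi is a hypothetical name, and qoffsets is assumed to hold mbx*mby floats:

#include <libavutil/common.h>
#include <libavutil/frame.h>

/* Hypothetical helper mirroring the loop above: write one ROI's QP offset into
 * every quantiser-group block it covers, clipped to the block grid. */
static void apply_roi(float *qoffsets, int mbx, int mby, int mb_size,
                      int qp_range, const AVRegionOfInterest *roi)
{
    int starty = FFMIN(mby, roi->top / mb_size);
    int endy   = FFMIN(mby, (roi->bottom + mb_size - 1) / mb_size);
    int startx = FFMIN(mbx, roi->left / mb_size);
    int endx   = FFMIN(mbx, (roi->right + mb_size - 1) / mb_size);
    /* qoffset is a fraction of the full QP range, clipped to [-qp_range, qp_range] */
    float qoffset = av_clipf(av_q2d(roi->qoffset) * qp_range,
                             -qp_range, +qp_range);

    for (int y = starty; y < endy; y++)
        for (int x = startx; x < endx; x++)
            qoffsets[x + y * mbx] = qoffset;
}

Iterating the regions from last to first, as the listing does, lets earlier (higher-priority) regions overwrite later ones.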
 475                                  const AVFrame *pic, int *got_packet)
 478      x265_picture x265pic;
 479      x265_picture x265pic_out = { 0 };
 488      ctx->api->picture_init(ctx->params, &x265pic);
 491          for (i = 0; i < 3; i++) {
 492              x265pic.planes[i] = pic->data[i];
 496          x265pic.pts = pic->pts;
 500                              (ctx->forced_idr ? X265_TYPE_IDR : X265_TYPE_I) :
 511              if (!x265pic.userData) {
 520      ret = ctx->api->encoder_encode(ctx->encoder, &nal, &nnal,
 521                                     pic ? &x265pic : NULL, &x265pic_out);
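encoder_encode() is the same call both for feeding pictures and for draining: once the caller passes NULL instead of a picture, x265 keeps returning buffered frames until none are left. A minimal sketch of that calling convention:

#include <stdint.h>
#include <x265.h>

/* Sketch of the call pattern: returns >0 when an encoded picture came out,
 * 0 when the encoder needs more input (or is fully drained), <0 on error.
 * Pass in == NULL to flush delayed frames at end of stream. */
static int encode_step(const x265_api *api, x265_encoder *enc,
                       x265_picture *in, x265_picture *out,
                       x265_nal **nal, uint32_t *nnal)
{
    return api->encoder_encode(enc, nal, nnal, in, out);
}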
 531      for (i = 0; i < nnal; i++)
 532          payload += nal[i].sizeBytes;
 541      for (i = 0; i < nnal; i++) {
 542          memcpy(dst, nal[i].payload, nal[i].sizeBytes);
 543          dst += nal[i].sizeBytes;
 549      pkt->pts = x265pic_out.pts;
 550      pkt->dts = x265pic_out.dts;
 552      switch (x265pic_out.sliceType) {
 569  #if FF_API_CODED_FRAME
 575  #if X265_BUILD >= 130
 576      if (x265pic_out.sliceType == X265_TYPE_B)
 578      if (x265pic_out.frameData.sliceType == 'b')
 584      if (x265pic_out.userData) {
 647      if (x265_api_get(12))
 649      else if (x265_api_get(10))
 651      else if (x265_api_get(8))
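The trailing x265_api_get() calls probe which internal bit depths the linked build can produce; the wrapper uses the answer to pick its list of supported pixel formats. A sketch of the same probe, with the listing's assumption that a build answering for 12 bit also covers the lower depths:

#include <stdio.h>
#include <x265.h>

/* Sketch: x265_api_get(bitDepth) returns NULL when the linked libx265 cannot
 * encode at that internal bit depth. */
static void report_bit_depths(void)
{
    if (x265_api_get(12))
        puts("libx265: 8-, 10- and 12-bit encoding available");
    else if (x265_api_get(10))
        puts("libx265: 8- and 10-bit encoding available");
    else if (x265_api_get(8))
        puts("libx265: 8-bit encoding only");
    else
        puts("libx265: no usable API found");
}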
 655  #define OFFSET(x) offsetof(libx265Context, x)
 656  #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
 664      { "x265-params", "set the x265 configuration using a :-separated list of key=value parameters", OFFSET(x265_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
 679      { "keyint_min", "-1" },
 686      { "i_qfactor",  "-1" },
 687      { "b_qfactor",  "-1" },
 701      .priv_class     = &class,
 705      .wrapper_name   = "libx265",
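For context, a minimal caller of the encoder declared above might look like the following sketch; the option string is only an example, and error handling is trimmed for brevity:

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

/* Sketch of a caller: open the libx265 wrapper and forward raw x265 options
 * through the "x265-params" option declared above.  Values are examples only. */
static AVCodecContext *open_libx265(int width, int height)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("libx265");
    AVCodecContext *enc;

    if (!codec)
        return NULL;
    enc = avcodec_alloc_context3(codec);
    if (!enc)
        return NULL;

    enc->width     = width;
    enc->height    = height;
    enc->pix_fmt   = AV_PIX_FMT_YUV420P;
    enc->time_base = (AVRational){ 1, 25 };

    /* same effect as -x265-params "keyint=60:bframes=3" on the command line */
    av_opt_set(enc->priv_data, "x265-params", "keyint=60:bframes=3", 0);

    if (avcodec_open2(enc, codec, NULL) < 0) {
        avcodec_free_context(&enc);
        return NULL;
    }
    return enc;
}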