librav1e.c
1 /*
2  * librav1e encoder
3  *
4  * Copyright (c) 2019 Derek Buitenhuis
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <rav1e.h>
24 
25 #include "libavutil/internal.h"
26 #include "libavutil/avassert.h"
27 #include "libavutil/base64.h"
28 #include "libavutil/common.h"
29 #include "libavutil/mathematics.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/pixdesc.h"
32 #include "avcodec.h"
33 #include "internal.h"
34 
35 typedef struct librav1eContext {
36  const AVClass *class;
37 
38  RaContext *ctx;
39  AVBSFContext *bsf;
40
41  uint8_t *pass_data;
42  size_t pass_pos;
43  int pass_size;
44 
45  AVDictionary *rav1e_opts;
46  int quantizer;
47  int speed;
48  int tiles;
49  int tile_rows;
50  int tile_cols;
51 } librav1eContext;
52
53 static inline RaPixelRange range_map(enum AVPixelFormat pix_fmt, enum AVColorRange range)
54 {
55  switch (pix_fmt) {
56  case AV_PIX_FMT_YUVJ420P:
57  case AV_PIX_FMT_YUVJ422P:
58  case AV_PIX_FMT_YUVJ444P:
59  return RA_PIXEL_RANGE_FULL;
60  }
61 
62  switch (range) {
63  case AVCOL_RANGE_JPEG:
64  return RA_PIXEL_RANGE_FULL;
65  case AVCOL_RANGE_MPEG:
66  default:
67  return RA_PIXEL_RANGE_LIMITED;
68  }
69 }
70 
71 static inline RaChromaSampling pix_fmt_map(enum AVPixelFormat pix_fmt)
72 {
73  switch (pix_fmt) {
74  case AV_PIX_FMT_YUV420P:
75  case AV_PIX_FMT_YUVJ420P:
76  case AV_PIX_FMT_YUV420P10:
77  case AV_PIX_FMT_YUV420P12:
78  return RA_CHROMA_SAMPLING_CS420;
79  case AV_PIX_FMT_YUV422P:
80  case AV_PIX_FMT_YUVJ422P:
81  case AV_PIX_FMT_YUV422P10:
82  case AV_PIX_FMT_YUV422P12:
83  return RA_CHROMA_SAMPLING_CS422;
84  case AV_PIX_FMT_YUV444P:
85  case AV_PIX_FMT_YUVJ444P:
86  case AV_PIX_FMT_YUV444P10:
87  case AV_PIX_FMT_YUV444P12:
88  return RA_CHROMA_SAMPLING_CS444;
89  default:
90  av_assert0(0);
91  }
92 }
93 
94 static inline RaChromaSamplePosition chroma_loc_map(enum AVChromaLocation chroma_loc)
95 {
96  switch (chroma_loc) {
97  case AVCHROMA_LOC_LEFT:
98  return RA_CHROMA_SAMPLE_POSITION_VERTICAL;
99  case AVCHROMA_LOC_TOPLEFT:
100  return RA_CHROMA_SAMPLE_POSITION_COLOCATED;
101  default:
102  return RA_CHROMA_SAMPLE_POSITION_UNKNOWN;
103  }
104 }
105 
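/*
 * Two-pass rate-control glue for the functions below: during the first pass,
 * get_stats() drains rav1e's stats via rav1e_twopass_out() into ctx->pass_data
 * and, once the encoder reaches end of stream, base64-encodes the accumulated
 * buffer into avctx->stats_out. During the second pass, set_stats() feeds the
 * base64-decoded first-pass buffer back to rav1e with rav1e_twopass_in().
 */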
106 static int get_stats(AVCodecContext *avctx, int eos)
107 {
108  librav1eContext *ctx = avctx->priv_data;
109  RaData* buf = rav1e_twopass_out(ctx->ctx);
110  if (!buf)
111  return 0;
112 
113  if (!eos) {
114  uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,
115  ctx->pass_pos + buf->len);
116  if (!tmp) {
117  rav1e_data_unref(buf);
118  return AVERROR(ENOMEM);
119  }
120 
121  ctx->pass_data = tmp;
122  memcpy(ctx->pass_data + ctx->pass_pos, buf->data, buf->len);
123  ctx->pass_pos += buf->len;
124  } else {
125  size_t b64_size = AV_BASE64_SIZE(ctx->pass_pos);
126 
127  memcpy(ctx->pass_data, buf->data, buf->len);
128 
129  avctx->stats_out = av_malloc(b64_size);
130  if (!avctx->stats_out) {
131  rav1e_data_unref(buf);
132  return AVERROR(ENOMEM);
133  }
134 
135  av_base64_encode(avctx->stats_out, b64_size, ctx->pass_data, ctx->pass_pos);
136 
137  av_freep(&ctx->pass_data);
138  }
139 
140  rav1e_data_unref(buf);
141 
142  return 0;
143 }
144 
145 static int set_stats(AVCodecContext *avctx)
146 {
147  librav1eContext *ctx = avctx->priv_data;
148  int ret = 1;
149 
150  while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {
151  ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);
152  if (ret < 0)
153  return AVERROR_EXTERNAL;
154  ctx->pass_pos += ret;
155  }
156 
157  return 0;
158 }
159 
160 static av_cold int librav1e_encode_close(AVCodecContext *avctx)
161 {
162  librav1eContext *ctx = avctx->priv_data;
163 
164  if (ctx->ctx) {
165  rav1e_context_unref(ctx->ctx);
166  ctx->ctx = NULL;
167  }
168 
169  av_bsf_free(&ctx->bsf);
170  av_freep(&ctx->pass_data);
171 
172  return 0;
173 }
174 
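/*
 * Encoder init: builds a RaConfig from the AVCodecContext in the following
 * order -- time base, optional second-pass stats decoding (base64), the
 * extract_extradata BSF when global headers are requested, user-supplied
 * rav1e-params, then dimensions, threads, speed, tiles, keyframe intervals,
 * rate control (bitrate with quantizer bounds, or constant quantizer), pixel
 * format/chroma position/range and color description, and finally
 * rav1e_context_new().
 */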
175 static av_cold int librav1e_encode_init(AVCodecContext *avctx)
176 {
177  librav1eContext *ctx = avctx->priv_data;
178  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
179  RaConfig *cfg = NULL;
180  int rret;
181  int ret = 0;
182 
183  cfg = rav1e_config_default();
184  if (!cfg) {
185  av_log(avctx, AV_LOG_ERROR, "Could not allocate rav1e config.\n");
186  return AVERROR_EXTERNAL;
187  }
188 
189  rav1e_config_set_time_base(cfg, (RaRational) {
190  avctx->time_base.num * avctx->ticks_per_frame,
191  avctx->time_base.den
192  });
193 
194  if (avctx->flags & AV_CODEC_FLAG_PASS2) {
195  if (!avctx->stats_in) {
196  av_log(avctx, AV_LOG_ERROR, "No stats file provided for second pass.\n");
197  ret = AVERROR(EINVAL);
198  goto end;
199  }
200 
201  ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;
202  ctx->pass_data = av_malloc(ctx->pass_size);
203  if (!ctx->pass_data) {
204  av_log(avctx, AV_LOG_ERROR, "Could not allocate stats buffer.\n");
205  ret = AVERROR(ENOMEM);
206  goto end;
207  }
208 
209  ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);
210  if (ctx->pass_size < 0) {
211  av_log(avctx, AV_LOG_ERROR, "Invalid pass file.\n");
212  ret = AVERROR(EINVAL);
213  goto end;
214  }
215  }
216 
217  if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
218  const AVBitStreamFilter *filter = av_bsf_get_by_name("extract_extradata");
219  int bret;
220 
221  if (!filter) {
222  av_log(avctx, AV_LOG_ERROR, "extract_extradata bitstream filter "
223  "not found. This is a bug, please report it.\n");
224  ret = AVERROR_BUG;
225  goto end;
226  }
227 
228  bret = av_bsf_alloc(filter, &ctx->bsf);
229  if (bret < 0) {
230  ret = bret;
231  goto end;
232  }
233 
234  bret = avcodec_parameters_from_context(ctx->bsf->par_in, avctx);
235  if (bret < 0) {
236  ret = bret;
237  goto end;
238  }
239 
240  bret = av_bsf_init(ctx->bsf);
241  if (bret < 0) {
242  ret = bret;
243  goto end;
244  }
245  }
246 
247  {
248  AVDictionaryEntry *en = NULL;
249  while ((en = av_dict_get(ctx->rav1e_opts, "", en, AV_DICT_IGNORE_SUFFIX))) {
250  int parse_ret = rav1e_config_parse(cfg, en->key, en->value);
251  if (parse_ret < 0)
252  av_log(avctx, AV_LOG_WARNING, "Invalid value for %s: %s.\n", en->key, en->value);
253  }
254  }
255 
256  rret = rav1e_config_parse_int(cfg, "width", avctx->width);
257  if (rret < 0) {
258  av_log(avctx, AV_LOG_ERROR, "Invalid width passed to rav1e.\n");
259  ret = AVERROR_INVALIDDATA;
260  goto end;
261  }
262 
263  rret = rav1e_config_parse_int(cfg, "height", avctx->height);
264  if (rret < 0) {
265  av_log(avctx, AV_LOG_ERROR, "Invalid height passed to rav1e.\n");
266  ret = AVERROR_INVALIDDATA;
267  goto end;
268  }
269 
270  rret = rav1e_config_parse_int(cfg, "threads", avctx->thread_count);
271  if (rret < 0)
272  av_log(avctx, AV_LOG_WARNING, "Invalid number of threads, defaulting to auto.\n");
273 
274  if (ctx->speed >= 0) {
275  rret = rav1e_config_parse_int(cfg, "speed", ctx->speed);
276  if (rret < 0) {
277  av_log(avctx, AV_LOG_ERROR, "Could not set speed preset.\n");
278  ret = AVERROR_EXTERNAL;
279  goto end;
280  }
281  }
282 
283  /* rav1e handles precedence between 'tiles' and cols/rows for us. */
284  if (ctx->tiles > 0) {
285  rret = rav1e_config_parse_int(cfg, "tiles", ctx->tiles);
286  if (rret < 0) {
287  av_log(avctx, AV_LOG_ERROR, "Could not set number of tiles to encode with.\n");
288  ret = AVERROR_EXTERNAL;
289  goto end;
290  }
291  }
292  if (ctx->tile_rows > 0) {
293  rret = rav1e_config_parse_int(cfg, "tile_rows", ctx->tile_rows);
294  if (rret < 0) {
295  av_log(avctx, AV_LOG_ERROR, "Could not set number of tile rows to encode with.\n");
296  ret = AVERROR_EXTERNAL;
297  goto end;
298  }
299  }
300  if (ctx->tile_cols > 0) {
301  rret = rav1e_config_parse_int(cfg, "tile_cols", ctx->tile_cols);
302  if (rret < 0) {
303  av_log(avctx, AV_LOG_ERROR, "Could not set number of tile cols to encode with.\n");
304  ret = AVERROR_EXTERNAL;
305  goto end;
306  }
307  }
308 
309  if (avctx->gop_size > 0) {
310  rret = rav1e_config_parse_int(cfg, "key_frame_interval", avctx->gop_size);
311  if (rret < 0) {
312  av_log(avctx, AV_LOG_ERROR, "Could not set max keyint.\n");
313  ret = AVERROR_EXTERNAL;
314  goto end;
315  }
316  }
317 
318  if (avctx->keyint_min > 0) {
319  rret = rav1e_config_parse_int(cfg, "min_key_frame_interval", avctx->keyint_min);
320  if (rret < 0) {
321  av_log(avctx, AV_LOG_ERROR, "Could not set min keyint.\n");
322  ret = AVERROR_EXTERNAL;
323  goto end;
324  }
325  }
326 
327  if (avctx->bit_rate && ctx->quantizer < 0) {
328  int max_quantizer = avctx->qmax >= 0 ? avctx->qmax : 255;
329 
330  rret = rav1e_config_parse_int(cfg, "quantizer", max_quantizer);
331  if (rret < 0) {
332  av_log(avctx, AV_LOG_ERROR, "Could not set max quantizer.\n");
333  ret = AVERROR_EXTERNAL;
334  goto end;
335  }
336 
337  if (avctx->qmin >= 0) {
338  rret = rav1e_config_parse_int(cfg, "min_quantizer", avctx->qmin);
339  if (rret < 0) {
340  av_log(avctx, AV_LOG_ERROR, "Could not set min quantizer.\n");
341  ret = AVERROR_EXTERNAL;
342  goto end;
343  }
344  }
345 
346  rret = rav1e_config_parse_int(cfg, "bitrate", avctx->bit_rate);
347  if (rret < 0) {
348  av_log(avctx, AV_LOG_ERROR, "Could not set bitrate.\n");
349  ret = AVERROR_INVALIDDATA;
350  goto end;
351  }
352  } else if (ctx->quantizer >= 0) {
353  if (avctx->bit_rate)
354  av_log(avctx, AV_LOG_WARNING, "Both bitrate and quantizer specified. Using quantizer mode.\n");
355 
356  rret = rav1e_config_parse_int(cfg, "quantizer", ctx->quantizer);
357  if (rret < 0) {
358  av_log(avctx, AV_LOG_ERROR, "Could not set quantizer.\n");
359  ret = AVERROR_EXTERNAL;
360  goto end;
361  }
362  }
363 
364  rret = rav1e_config_set_pixel_format(cfg, desc->comp[0].depth,
365  pix_fmt_map(avctx->pix_fmt),
366  chroma_loc_map(avctx->chroma_sample_location),
367  range_map(avctx->pix_fmt, avctx->color_range));
368  if (rret < 0) {
369  av_log(avctx, AV_LOG_ERROR, "Failed to set pixel format properties.\n");
370  ret = AVERROR_INVALIDDATA;
371  goto end;
372  }
373 
374  /* rav1e's colorspace enums match standard values. */
375  rret = rav1e_config_set_color_description(cfg, (RaMatrixCoefficients) avctx->colorspace,
376  (RaColorPrimaries) avctx->color_primaries,
377  (RaTransferCharacteristics) avctx->color_trc);
378  if (rret < 0) {
379  av_log(avctx, AV_LOG_WARNING, "Failed to set color properties.\n");
380  if (avctx->err_recognition & AV_EF_EXPLODE) {
381  ret = AVERROR_INVALIDDATA;
382  goto end;
383  }
384  }
385 
386  ctx->ctx = rav1e_context_new(cfg);
387  if (!ctx->ctx) {
388  av_log(avctx, AV_LOG_ERROR, "Failed to create rav1e encode context.\n");
389  ret = AVERROR_EXTERNAL;
390  goto end;
391  }
392 
393  ret = 0;
394 
395 end:
396 
397  rav1e_config_unref(cfg);
398 
399  return ret;
400 }
401 
402 static int librav1e_send_frame(AVCodecContext *avctx, const AVFrame *frame)
403 {
404  librav1eContext *ctx = avctx->priv_data;
405  RaFrame *rframe = NULL;
406  int ret;
407 
408  if (frame) {
409  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
410
411  rframe = rav1e_frame_new(ctx->ctx);
412  if (!rframe) {
413  av_log(avctx, AV_LOG_ERROR, "Could not allocate new rav1e frame.\n");
414  return AVERROR(ENOMEM);
415  }
416 
417  for (int i = 0; i < desc->nb_components; i++) {
418  int shift = i ? desc->log2_chroma_h : 0;
419  int bytes = desc->comp[0].depth == 8 ? 1 : 2;
420  rav1e_frame_fill_plane(rframe, i, frame->data[i],
421  (frame->height >> shift) * frame->linesize[i],
422  frame->linesize[i], bytes);
423  }
424  }
425 
426  ret = rav1e_send_frame(ctx->ctx, rframe);
427  if (rframe)
428  rav1e_frame_unref(rframe); /* No need to unref if flushing. */
429 
430  switch (ret) {
431  case RA_ENCODER_STATUS_SUCCESS:
432  break;
433  case RA_ENCODER_STATUS_ENOUGH_DATA:
434  return AVERROR(EAGAIN);
435  case RA_ENCODER_STATUS_FAILURE:
436  av_log(avctx, AV_LOG_ERROR, "Could not send frame: %s\n", rav1e_status_to_str(ret));
437  return AVERROR_EXTERNAL;
438  default:
439  av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_send_frame: %s\n", ret, rav1e_status_to_str(ret));
440  return AVERROR_UNKNOWN;
441  }
442 
443  return 0;
444 }
445 
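/*
 * Packet retrieval: pumps two-pass stats on each call, then maps rav1e's
 * status codes onto FFmpeg's send/receive contract (ENOUGH_DATA and
 * NEED_MORE_DATA become EAGAIN, LIMIT_REACHED becomes EOF after flushing the
 * first-pass stats, ENCODED retries while draining). On success the RaPacket
 * payload is copied into an AVPacket and, with global headers enabled, run
 * through the extract_extradata bitstream filter.
 */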
446 static int librav1e_receive_packet(AVCodecContext *avctx, AVPacket *pkt)
447 {
448  librav1eContext *ctx = avctx->priv_data;
449  RaPacket *rpkt = NULL;
450  int ret;
451 
452 retry:
453 
454  if (avctx->flags & AV_CODEC_FLAG_PASS1) {
455  int sret = get_stats(avctx, 0);
456  if (sret < 0)
457  return sret;
458  } else if (avctx->flags & AV_CODEC_FLAG_PASS2) {
459  int sret = set_stats(avctx);
460  if (sret < 0)
461  return sret;
462  }
463 
464  ret = rav1e_receive_packet(ctx->ctx, &rpkt);
465  switch (ret) {
466  case RA_ENCODER_STATUS_SUCCESS:
467  break;
468  case RA_ENCODER_STATUS_LIMIT_REACHED:
469  if (avctx->flags & AV_CODEC_FLAG_PASS1) {
470  int sret = get_stats(avctx, 1);
471  if (sret < 0)
472  return sret;
473  }
474  return AVERROR_EOF;
475  case RA_ENCODER_STATUS_ENCODED:
476  if (avctx->internal->draining)
477  goto retry;
478  return AVERROR(EAGAIN);
479  case RA_ENCODER_STATUS_NEED_MORE_DATA:
480  if (avctx->internal->draining) {
481  av_log(avctx, AV_LOG_ERROR, "Unexpected error when receiving packet after EOF.\n");
482  return AVERROR_EXTERNAL;
483  }
484  return AVERROR(EAGAIN);
485  case RA_ENCODER_STATUS_FAILURE:
486  av_log(avctx, AV_LOG_ERROR, "Could not encode frame: %s\n", rav1e_status_to_str(ret));
487  return AVERROR_EXTERNAL;
488  default:
489  av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_receive_packet: %s\n", ret, rav1e_status_to_str(ret));
490  return AVERROR_UNKNOWN;
491  }
492 
493  ret = av_new_packet(pkt, rpkt->len);
494  if (ret < 0) {
495  av_log(avctx, AV_LOG_ERROR, "Could not allocate packet.\n");
496  rav1e_packet_unref(rpkt);
497  return ret;
498  }
499 
500  memcpy(pkt->data, rpkt->data, rpkt->len);
501 
502  if (rpkt->frame_type == RA_FRAME_TYPE_KEY)
503  pkt->flags |= AV_PKT_FLAG_KEY;
504 
505  pkt->pts = pkt->dts = rpkt->input_frameno * avctx->ticks_per_frame;
506  rav1e_packet_unref(rpkt);
507 
508  if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
509  int ret = av_bsf_send_packet(ctx->bsf, pkt);
510  if (ret < 0) {
511  av_log(avctx, AV_LOG_ERROR, "extradata extraction send failed.\n");
512  av_packet_unref(pkt);
513  return ret;
514  }
515 
516  ret = av_bsf_receive_packet(ctx->bsf, pkt);
517  if (ret < 0) {
518  av_log(avctx, AV_LOG_ERROR, "extradata extraction receive failed.\n");
519  av_packet_unref(pkt);
520  return ret;
521  }
522  }
523 
524  return 0;
525 }
526 
527 #define OFFSET(x) offsetof(librav1eContext, x)
528 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
529 
530 static const AVOption options[] = {
531  { "qp", "use constant quantizer mode", OFFSET(quantizer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 255, VE },
532  { "speed", "what speed preset to use", OFFSET(speed), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 10, VE },
533  { "tiles", "number of tiles to encode with", OFFSET(tiles), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
534  { "tile-rows", "number of tile rows to encode with", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
535  { "tile-columns", "number of tile columns to encode with", OFFSET(tile_cols), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
536  { "rav1e-params", "set the rav1e configuration using a :-separated list of key=value parameters", OFFSET(rav1e_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
537  { NULL }
538 };
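/*
 * Example usage (a sketch; input/output names are placeholders):
 *   ffmpeg -i in.mkv -c:v librav1e -qp 80 -speed 6 out.mkv
 *   ffmpeg -i in.mkv -c:v librav1e -b:v 2M -tile-rows 2 -tile-columns 2 out.mkv
 *   ffmpeg -i in.mkv -c:v librav1e -rav1e-params speed=5:quantizer=100 out.mkv
 * rav1e-params entries are handed directly to rav1e_config_parse(); keys this
 * wrapper itself sets (e.g. "speed", "quantizer", "tiles") are known-valid.
 */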
539 
540 static const AVCodecDefault librav1e_defaults[] = {
541  { "b", "0" },
542  { "g", "0" },
543  { "keyint_min", "0" },
544  { "qmax", "-1" },
545  { "qmin", "-1" },
546  { NULL }
547 };
548 
549 const enum AVPixelFormat librav1e_pix_fmts[] = {
550  AV_PIX_FMT_YUV420P,
551  AV_PIX_FMT_YUVJ420P,
552  AV_PIX_FMT_YUV420P10,
553  AV_PIX_FMT_YUV420P12,
554  AV_PIX_FMT_YUV422P,
555  AV_PIX_FMT_YUVJ422P,
556  AV_PIX_FMT_YUV422P10,
557  AV_PIX_FMT_YUV422P12,
558  AV_PIX_FMT_YUV444P,
559  AV_PIX_FMT_YUVJ444P,
560  AV_PIX_FMT_YUV444P10,
561  AV_PIX_FMT_YUV444P12,
562  AV_PIX_FMT_NONE
563 };
564 
565 static const AVClass class = {
566  .class_name = "librav1e",
567  .item_name = av_default_item_name,
568  .option = options,
569  .version = LIBAVUTIL_VERSION_INT,
570 };
571 
572 AVCodec ff_librav1e_encoder = {
573  .name = "librav1e",
574  .long_name = NULL_IF_CONFIG_SMALL("librav1e AV1"),
575  .type = AVMEDIA_TYPE_VIDEO,
576  .id = AV_CODEC_ID_AV1,
577  .init = librav1e_encode_init,
578  .send_frame = librav1e_send_frame,
579  .receive_packet = librav1e_receive_packet,
580  .close = librav1e_encode_close,
581  .priv_data_size = sizeof(librav1eContext),
582  .priv_class = &class,
583  .defaults = librav1e_defaults,
584  .pix_fmts = librav1e_pix_fmts,
585  .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS,
586  .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
587  .wrapper_name = "librav1e",
588 };