librav1e.c
/*
 * librav1e encoder
 *
 * Copyright (c) 2019 Derek Buitenhuis
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <rav1e.h>

#include "libavutil/internal.h"
#include "libavutil/avassert.h"
#include "libavutil/base64.h"
#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avcodec.h"
#include "internal.h"

typedef struct librav1eContext {
    const AVClass *class;

    RaContext *ctx;
    AVBSFContext *bsf;

    uint8_t *pass_data;
    size_t pass_pos;
    int pass_size;

    char *rav1e_opts;
    int quantizer;
    int speed;
    int tiles;
    int tile_rows;
    int tile_cols;
} librav1eContext;

static inline RaPixelRange range_map(enum AVPixelFormat pix_fmt, enum AVColorRange range)
{
    switch (pix_fmt) {
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUVJ444P:
        return RA_PIXEL_RANGE_FULL;
    }

    switch (range) {
    case AVCOL_RANGE_JPEG:
        return RA_PIXEL_RANGE_FULL;
    case AVCOL_RANGE_MPEG:
    default:
        return RA_PIXEL_RANGE_LIMITED;
    }
}

static inline RaChromaSampling pix_fmt_map(enum AVPixelFormat pix_fmt)
{
    switch (pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        return RA_CHROMA_SAMPLING_CS420;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUV422P10:
    case AV_PIX_FMT_YUV422P12:
        return RA_CHROMA_SAMPLING_CS422;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        return RA_CHROMA_SAMPLING_CS444;
    default:
        av_assert0(0);
    }
}

static inline RaChromaSamplePosition chroma_loc_map(enum AVChromaLocation chroma_loc)
{
    switch (chroma_loc) {
    case AVCHROMA_LOC_LEFT:
        return RA_CHROMA_SAMPLE_POSITION_VERTICAL;
    case AVCHROMA_LOC_TOPLEFT:
        return RA_CHROMA_SAMPLE_POSITION_COLOCATED;
    default:
        return RA_CHROMA_SAMPLE_POSITION_UNKNOWN;
    }
}

static int get_stats(AVCodecContext *avctx, int eos)
{
    librav1eContext *ctx = avctx->priv_data;
    RaData* buf = rav1e_twopass_out(ctx->ctx);
    if (!buf)
        return 0;

    if (!eos) {
        uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,
                                       ctx->pass_pos + buf->len);
        if (!tmp) {
            rav1e_data_unref(buf);
            return AVERROR(ENOMEM);
        }

        ctx->pass_data = tmp;
        memcpy(ctx->pass_data + ctx->pass_pos, buf->data, buf->len);
        ctx->pass_pos += buf->len;
    } else {
        size_t b64_size = AV_BASE64_SIZE(ctx->pass_pos);

        memcpy(ctx->pass_data, buf->data, buf->len);

        avctx->stats_out = av_malloc(b64_size);
        if (!avctx->stats_out) {
            rav1e_data_unref(buf);
            return AVERROR(ENOMEM);
        }

        av_base64_encode(avctx->stats_out, b64_size, ctx->pass_data, ctx->pass_pos);

        av_freep(&ctx->pass_data);
    }

    rav1e_data_unref(buf);

    return 0;
}

static int set_stats(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;
    int ret = 1;

    while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {
        ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);
        if (ret < 0)
            return AVERROR_EXTERNAL;
        ctx->pass_pos += ret;
    }

    return 0;
}
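
/*
 * Two-pass flow: during the first pass, get_stats() drains rav1e's stats
 * via rav1e_twopass_out() and, once the encoder signals end of stream,
 * base64-encodes the accumulated buffer into avctx->stats_out for the
 * caller to save. During the second pass, the caller places that same
 * base64 string in avctx->stats_in; librav1e_encode_init() decodes it
 * into ctx->pass_data, and set_stats() feeds it back to rav1e through
 * rav1e_twopass_in() before each packet is requested.
 */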

static av_cold int librav1e_encode_close(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;

    if (ctx->ctx) {
        rav1e_context_unref(ctx->ctx);
        ctx->ctx = NULL;
    }

    av_bsf_free(&ctx->bsf);
    av_freep(&ctx->pass_data);

    return 0;
}

static av_cold int librav1e_encode_init(AVCodecContext *avctx)
{
    librav1eContext *ctx = avctx->priv_data;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    RaConfig *cfg = NULL;
    int rret;
    int ret = 0;

    cfg = rav1e_config_default();
    if (!cfg) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate rav1e config.\n");
        return AVERROR_EXTERNAL;
    }

    rav1e_config_set_time_base(cfg, (RaRational) {
        avctx->time_base.num * avctx->ticks_per_frame,
        avctx->time_base.den
    });

    if (avctx->flags & AV_CODEC_FLAG_PASS2) {
        if (!avctx->stats_in) {
            av_log(avctx, AV_LOG_ERROR, "No stats file provided for second pass.\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;
        ctx->pass_data = av_malloc(ctx->pass_size);
        if (!ctx->pass_data) {
            av_log(avctx, AV_LOG_ERROR, "Could not allocate stats buffer.\n");
            ret = AVERROR(ENOMEM);
            goto end;
        }

        ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);
        if (ctx->pass_size < 0) {
            av_log(avctx, AV_LOG_ERROR, "Invalid pass file.\n");
            ret = AVERROR(EINVAL);
            goto end;
        }
    }

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        const AVBitStreamFilter *filter = av_bsf_get_by_name("extract_extradata");
        int bret;

        if (!filter) {
            av_log(avctx, AV_LOG_ERROR, "extract_extradata bitstream filter "
                   "not found. This is a bug, please report it.\n");
            ret = AVERROR_BUG;
            goto end;
        }

        bret = av_bsf_alloc(filter, &ctx->bsf);
        if (bret < 0) {
            ret = bret;
            goto end;
        }

        bret = avcodec_parameters_from_context(ctx->bsf->par_in, avctx);
        if (bret < 0) {
            ret = bret;
            goto end;
        }

        bret = av_bsf_init(ctx->bsf);
        if (bret < 0) {
            ret = bret;
            goto end;
        }
    }

    if (ctx->rav1e_opts) {
        AVDictionary *dict = NULL;
        AVDictionaryEntry *en = NULL;

        if (!av_dict_parse_string(&dict, ctx->rav1e_opts, "=", ":", 0)) {
            while (en = av_dict_get(dict, "", en, AV_DICT_IGNORE_SUFFIX)) {
                int parse_ret = rav1e_config_parse(cfg, en->key, en->value);
                if (parse_ret < 0)
                    av_log(avctx, AV_LOG_WARNING, "Invalid value for %s: %s.\n", en->key, en->value);
            }
            av_dict_free(&dict);
        }
    }
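
    /*
     * Example: an option string such as "low_latency=true:speed=5"
     * (the keys here are illustrative; any key rav1e_config_parse()
     * accepts is valid) is split on ':' into key=value pairs above
     * and handed to rav1e one pair at a time.
     */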

    rret = rav1e_config_parse_int(cfg, "width", avctx->width);
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid width passed to rav1e.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    rret = rav1e_config_parse_int(cfg, "height", avctx->height);
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid height passed to rav1e.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    rret = rav1e_config_parse_int(cfg, "threads", avctx->thread_count);
    if (rret < 0)
        av_log(avctx, AV_LOG_WARNING, "Invalid number of threads, defaulting to auto.\n");

    if (ctx->speed >= 0) {
        rret = rav1e_config_parse_int(cfg, "speed", ctx->speed);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set speed preset.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    /* rav1e handles precedence between 'tiles' and cols/rows for us. */
    if (ctx->tiles > 0) {
        rret = rav1e_config_parse_int(cfg, "tiles", ctx->tiles);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tiles to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
    if (ctx->tile_rows > 0) {
        rret = rav1e_config_parse_int(cfg, "tile_rows", ctx->tile_rows);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tile rows to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }
    if (ctx->tile_cols > 0) {
        rret = rav1e_config_parse_int(cfg, "tile_cols", ctx->tile_cols);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set number of tile cols to encode with.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->gop_size > 0) {
        rret = rav1e_config_parse_int(cfg, "key_frame_interval", avctx->gop_size);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set max keyint.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->keyint_min > 0) {
        rret = rav1e_config_parse_int(cfg, "min_key_frame_interval", avctx->keyint_min);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set min keyint.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    if (avctx->bit_rate && ctx->quantizer < 0) {
        int max_quantizer = avctx->qmax >= 0 ? avctx->qmax : 255;

        rret = rav1e_config_parse_int(cfg, "quantizer", max_quantizer);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set max quantizer.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }

        if (avctx->qmin >= 0) {
            rret = rav1e_config_parse_int(cfg, "min_quantizer", avctx->qmin);
            if (rret < 0) {
                av_log(avctx, AV_LOG_ERROR, "Could not set min quantizer.\n");
                ret = AVERROR_EXTERNAL;
                goto end;
            }
        }

        rret = rav1e_config_parse_int(cfg, "bitrate", avctx->bit_rate);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set bitrate.\n");
            ret = AVERROR_INVALIDDATA;
            goto end;
        }
    } else if (ctx->quantizer >= 0) {
        if (avctx->bit_rate)
            av_log(avctx, AV_LOG_WARNING, "Both bitrate and quantizer specified. Using quantizer mode.\n");

        rret = rav1e_config_parse_int(cfg, "quantizer", ctx->quantizer);
        if (rret < 0) {
            av_log(avctx, AV_LOG_ERROR, "Could not set quantizer.\n");
            ret = AVERROR_EXTERNAL;
            goto end;
        }
    }

    rret = rav1e_config_set_pixel_format(cfg, desc->comp[0].depth,
                                         pix_fmt_map(avctx->pix_fmt),
                                         chroma_loc_map(avctx->chroma_sample_location),
                                         range_map(avctx->pix_fmt, avctx->color_range));
    if (rret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set pixel format properties.\n");
        ret = AVERROR_INVALIDDATA;
        goto end;
    }

    /* rav1e's colorspace enums match standard values. */
    rret = rav1e_config_set_color_description(cfg, (RaMatrixCoefficients) avctx->colorspace,
                                              (RaColorPrimaries) avctx->color_primaries,
                                              (RaTransferCharacteristics) avctx->color_trc);
    if (rret < 0) {
        av_log(avctx, AV_LOG_WARNING, "Failed to set color properties.\n");
        if (avctx->err_recognition & AV_EF_EXPLODE) {
            ret = AVERROR_INVALIDDATA;
            goto end;
        }
    }

    ctx->ctx = rav1e_context_new(cfg);
    if (!ctx->ctx) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create rav1e encode context.\n");
        ret = AVERROR_EXTERNAL;
        goto end;
    }

    ret = 0;

end:

    rav1e_config_unref(cfg);

    return ret;
}

static int librav1e_send_frame(AVCodecContext *avctx, const AVFrame *frame)
{
    librav1eContext *ctx = avctx->priv_data;
    RaFrame *rframe = NULL;
    int ret;

    if (frame) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);

        rframe = rav1e_frame_new(ctx->ctx);
        if (!rframe) {
            av_log(avctx, AV_LOG_ERROR, "Could not allocate new rav1e frame.\n");
            return AVERROR(ENOMEM);
        }

        for (int i = 0; i < desc->nb_components; i++) {
            int shift = i ? desc->log2_chroma_h : 0;
            int bytes = desc->comp[0].depth == 8 ? 1 : 2;
            rav1e_frame_fill_plane(rframe, i, frame->data[i],
                                   (frame->height >> shift) * frame->linesize[i],
                                   frame->linesize[i], bytes);
        }
    }

    ret = rav1e_send_frame(ctx->ctx, rframe);
    if (rframe)
        rav1e_frame_unref(rframe); /* No need to unref if flushing. */

    switch (ret) {
    case RA_ENCODER_STATUS_SUCCESS:
        break;
    case RA_ENCODER_STATUS_ENOUGH_DATA:
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_FAILURE:
        av_log(avctx, AV_LOG_ERROR, "Could not send frame: %s\n", rav1e_status_to_str(ret));
        return AVERROR_EXTERNAL;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_send_frame: %s\n", ret, rav1e_status_to_str(ret));
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int librav1e_receive_packet(AVCodecContext *avctx, AVPacket *pkt)
{
    librav1eContext *ctx = avctx->priv_data;
    RaPacket *rpkt = NULL;
    int ret;

retry:

    if (avctx->flags & AV_CODEC_FLAG_PASS1) {
        int sret = get_stats(avctx, 0);
        if (sret < 0)
            return sret;
    } else if (avctx->flags & AV_CODEC_FLAG_PASS2) {
        int sret = set_stats(avctx);
        if (sret < 0)
            return sret;
    }

    ret = rav1e_receive_packet(ctx->ctx, &rpkt);
    switch (ret) {
    case RA_ENCODER_STATUS_SUCCESS:
        break;
    case RA_ENCODER_STATUS_LIMIT_REACHED:
        if (avctx->flags & AV_CODEC_FLAG_PASS1) {
            int sret = get_stats(avctx, 1);
            if (sret < 0)
                return sret;
        }
        return AVERROR_EOF;
    case RA_ENCODER_STATUS_ENCODED:
        if (avctx->internal->draining)
            goto retry;
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_NEED_MORE_DATA:
        if (avctx->internal->draining) {
            av_log(avctx, AV_LOG_ERROR, "Unexpected error when receiving packet after EOF.\n");
            return AVERROR_EXTERNAL;
        }
        return AVERROR(EAGAIN);
    case RA_ENCODER_STATUS_FAILURE:
        av_log(avctx, AV_LOG_ERROR, "Could not encode frame: %s\n", rav1e_status_to_str(ret));
        return AVERROR_EXTERNAL;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_receive_packet: %s\n", ret, rav1e_status_to_str(ret));
        return AVERROR_UNKNOWN;
    }

    ret = av_new_packet(pkt, rpkt->len);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate packet.\n");
        rav1e_packet_unref(rpkt);
        return ret;
    }

    memcpy(pkt->data, rpkt->data, rpkt->len);

    if (rpkt->frame_type == RA_FRAME_TYPE_KEY)
        pkt->flags |= AV_PKT_FLAG_KEY;

    pkt->pts = pkt->dts = rpkt->input_frameno * avctx->ticks_per_frame;
    rav1e_packet_unref(rpkt);

    if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
        int ret = av_bsf_send_packet(ctx->bsf, pkt);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "extradata extraction send failed.\n");
            av_packet_unref(pkt);
            return ret;
        }

        ret = av_bsf_receive_packet(ctx->bsf, pkt);
        if (ret < 0) {
            av_log(avctx, AV_LOG_ERROR, "extradata extraction receive failed.\n");
            av_packet_unref(pkt);
            return ret;
        }
    }

    return 0;
}

#define OFFSET(x) offsetof(librav1eContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM

static const AVOption options[] = {
    { "qp", "use constant quantizer mode", OFFSET(quantizer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 255, VE },
    { "speed", "what speed preset to use", OFFSET(speed), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 10, VE },
    { "tiles", "number of tiles to encode with", OFFSET(tiles), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
    { "tile-rows", "number of tile rows to encode with", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
    { "tile-columns", "number of tile columns to encode with", OFFSET(tile_cols), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
    { "rav1e-params", "set the rav1e configuration using a :-separated list of key=value parameters", OFFSET(rav1e_opts), AV_OPT_TYPE_STRING, { 0 }, 0, 0, VE },
    { NULL }
};
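
/*
 * These private options are reachable through the usual AVOptions API,
 * e.g. (illustrative call sites, not part of this file):
 *
 *     av_opt_set_int(avctx->priv_data, "qp", 100, 0);
 *     av_opt_set(avctx->priv_data, "rav1e-params", "speed=5", 0);
 *
 * or on the ffmpeg command line as "-c:v librav1e -qp 100 -speed 6".
 */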

static const AVCodecDefault librav1e_defaults[] = {
    { "b", "0" },
    { "g", "0" },
    { "keyint_min", "0" },
    { "qmax", "-1" },
    { "qmin", "-1" },
    { NULL }
};

static const enum AVPixelFormat librav1e_pix_fmts[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_YUVJ420P,
    AV_PIX_FMT_YUV420P10,
    AV_PIX_FMT_YUV420P12,
    AV_PIX_FMT_YUV422P,
    AV_PIX_FMT_YUVJ422P,
    AV_PIX_FMT_YUV422P10,
    AV_PIX_FMT_YUV422P12,
    AV_PIX_FMT_YUV444P,
    AV_PIX_FMT_YUVJ444P,
    AV_PIX_FMT_YUV444P10,
    AV_PIX_FMT_YUV444P12,
    AV_PIX_FMT_NONE
};

static const AVClass class = {
    .class_name = "librav1e",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_librav1e_encoder = {
    .name           = "librav1e",
    .long_name      = NULL_IF_CONFIG_SMALL("librav1e AV1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_AV1,
    .init           = librav1e_encode_init,
    .send_frame     = librav1e_send_frame,
    .receive_packet = librav1e_receive_packet,
    .close          = librav1e_encode_close,
    .priv_data_size = sizeof(librav1eContext),
    .priv_class     = &class,
    .defaults       = librav1e_defaults,
    .pix_fmts       = librav1e_pix_fmts,
    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP,
    .wrapper_name   = "librav1e",
};
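
For reference, the sketch below shows one way to drive this encoder through the public libavcodec API. It is not part of librav1e.c: the resolution, time base, and option values are arbitrary examples, most error handling is omitted, and it assumes the standard avcodec_send_frame()/avcodec_receive_packet() calling convention that sits on top of the callbacks registered above.

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

static int encode_one(AVCodecContext *enc, const AVFrame *frame, AVPacket *pkt)
{
    /* Feed one frame (or NULL to flush) and drain any ready packets. */
    int ret = avcodec_send_frame(enc, frame);
    if (ret < 0)
        return ret;

    while (ret >= 0) {
        ret = avcodec_receive_packet(enc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;
        if (ret < 0)
            return ret;
        /* ... write pkt->data / pkt->size somewhere ... */
        av_packet_unref(pkt);
    }
    return 0;
}

int main(void)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("librav1e");
    AVCodecContext *enc;

    if (!codec)
        return 1;

    enc = avcodec_alloc_context3(codec);
    enc->width     = 1280;
    enc->height    = 720;
    enc->time_base = (AVRational){ 1, 30 };
    enc->pix_fmt   = AV_PIX_FMT_YUV420P;

    /* Private options declared in options[] above. */
    av_opt_set_int(enc->priv_data, "qp", 100, 0);
    av_opt_set_int(enc->priv_data, "speed", 6, 0);

    if (avcodec_open2(enc, codec, NULL) < 0)
        return 1;

    /* ... allocate and fill AVFrames, call encode_one() per frame,
     * then once more with a NULL frame to flush ... */

    avcodec_free_context(&enc);
    return 0;
}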