FFmpeg
librav1e.c
1 /*
2  * librav1e encoder
3  *
4  * Copyright (c) 2019 Derek Buitenhuis
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <rav1e.h>
24 
25 #include "libavutil/internal.h"
26 #include "libavutil/avassert.h"
27 #include "libavutil/base64.h"
28 #include "libavutil/common.h"
29 #include "libavutil/mathematics.h"
30 #include "libavutil/opt.h"
31 #include "libavutil/pixdesc.h"
32 #include "avcodec.h"
33 #include "encode.h"
34 #include "internal.h"
35 
36 typedef struct librav1eContext {
37  const AVClass *class;
38 
39  RaContext *ctx;
40  AVFrame *frame;
41  RaFrame *rframe;
42  AVBSFContext *bsf;
43 
44  uint8_t *pass_data;
45  size_t pass_pos;
46  int pass_size;
47 
48  AVDictionary *rav1e_opts;
49  int quantizer;
50  int speed;
51  int tiles;
52  int tile_rows;
53  int tile_cols;
54 } librav1eContext;
55 
56 static inline RaPixelRange range_map(enum AVPixelFormat pix_fmt, enum AVColorRange range)
57 {
58  switch (pix_fmt) {
59  case AV_PIX_FMT_YUVJ420P:
60  case AV_PIX_FMT_YUVJ422P:
61  case AV_PIX_FMT_YUVJ444P:
62  return RA_PIXEL_RANGE_FULL;
63  }
64 
65  switch (range) {
66  case AVCOL_RANGE_JPEG:
67  return RA_PIXEL_RANGE_FULL;
68  case AVCOL_RANGE_MPEG:
69  default:
70  return RA_PIXEL_RANGE_LIMITED;
71  }
72 }
73 
74 static inline RaChromaSampling pix_fmt_map(enum AVPixelFormat pix_fmt)
75 {
76  switch (pix_fmt) {
77  case AV_PIX_FMT_YUV420P:
78  case AV_PIX_FMT_YUVJ420P:
79  case AV_PIX_FMT_YUV420P10:
80  case AV_PIX_FMT_YUV420P12:
81  return RA_CHROMA_SAMPLING_CS420;
82  case AV_PIX_FMT_YUV422P:
83  case AV_PIX_FMT_YUVJ422P:
84  case AV_PIX_FMT_YUV422P10:
85  case AV_PIX_FMT_YUV422P12:
86  return RA_CHROMA_SAMPLING_CS422;
87  case AV_PIX_FMT_YUV444P:
88  case AV_PIX_FMT_YUVJ444P:
89  case AV_PIX_FMT_YUV444P10:
90  case AV_PIX_FMT_YUV444P12:
91  return RA_CHROMA_SAMPLING_CS444;
92  default:
93  av_assert0(0);
94  }
95 }
96 
97 static inline RaChromaSamplePosition chroma_loc_map(enum AVChromaLocation chroma_loc)
98 {
99  switch (chroma_loc) {
100  case AVCHROMA_LOC_LEFT:
101  return RA_CHROMA_SAMPLE_POSITION_VERTICAL;
102  case AVCHROMA_LOC_TOPLEFT:
103  return RA_CHROMA_SAMPLE_POSITION_COLOCATED;
104  default:
105  return RA_CHROMA_SAMPLE_POSITION_UNKNOWN;
106  }
107 }
108 
109 static int get_stats(AVCodecContext *avctx, int eos)
110 {
111  librav1eContext *ctx = avctx->priv_data;
112  RaData* buf = rav1e_twopass_out(ctx->ctx);
113  if (!buf)
114  return 0;
115 
116  if (!eos) {
117  uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,
118  ctx->pass_pos + buf->len);
119  if (!tmp) {
120  rav1e_data_unref(buf);
121  return AVERROR(ENOMEM);
122  }
123 
124  ctx->pass_data = tmp;
125  memcpy(ctx->pass_data + ctx->pass_pos, buf->data, buf->len);
126  ctx->pass_pos += buf->len;
127  } else {
128  size_t b64_size = AV_BASE64_SIZE(ctx->pass_pos);
129 
130  memcpy(ctx->pass_data, buf->data, buf->len);
131 
132  avctx->stats_out = av_malloc(b64_size);
133  if (!avctx->stats_out) {
134  rav1e_data_unref(buf);
135  return AVERROR(ENOMEM);
136  }
137 
138  av_base64_encode(avctx->stats_out, b64_size, ctx->pass_data, ctx->pass_pos);
139 
140  av_freep(&ctx->pass_data);
141  }
142 
143  rav1e_data_unref(buf);
144 
145  return 0;
146 }
147 
148 static int set_stats(AVCodecContext *avctx)
149 {
150  librav1eContext *ctx = avctx->priv_data;
151  int ret = 1;
152 
153  while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {
154  ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);
155  if (ret < 0)
156  return AVERROR_EXTERNAL;
157  ctx->pass_pos += ret;
158  }
159 
160  return 0;
161 }
162 
163 static av_cold int librav1e_encode_close(AVCodecContext *avctx)
164 {
165  librav1eContext *ctx = avctx->priv_data;
166 
167  if (ctx->ctx) {
168  rav1e_context_unref(ctx->ctx);
169  ctx->ctx = NULL;
170  }
171  if (ctx->rframe) {
172  rav1e_frame_unref(ctx->rframe);
173  ctx->rframe = NULL;
174  }
175 
176  av_frame_free(&ctx->frame);
177  av_bsf_free(&ctx->bsf);
178  av_freep(&ctx->pass_data);
179 
180  return 0;
181 }
182 
183 static av_cold int librav1e_encode_init(AVCodecContext *avctx)
184 {
185  librav1eContext *ctx = avctx->priv_data;
186  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
187  RaConfig *cfg = NULL;
188  int rret;
189  int ret = 0;
190 
191  ctx->frame = av_frame_alloc();
192  if (!ctx->frame)
193  return AVERROR(ENOMEM);
194 
195  cfg = rav1e_config_default();
196  if (!cfg) {
197  av_log(avctx, AV_LOG_ERROR, "Could not allocate rav1e config.\n");
198  return AVERROR_EXTERNAL;
199  }
200 
201  /*
202  * Rav1e currently uses the time base given to it only for ratecontrol... where
203  * the inverse is taken and used as a framerate. So, do what we do in other wrappers
204  * and use the framerate if we can.
205  */
206  if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
207  rav1e_config_set_time_base(cfg, (RaRational) {
208  avctx->framerate.den, avctx->framerate.num
209  });
210  } else {
211  rav1e_config_set_time_base(cfg, (RaRational) {
212  avctx->time_base.num * avctx->ticks_per_frame,
213  avctx->time_base.den
214  });
215  }
216 
217  if ((avctx->flags & AV_CODEC_FLAG_PASS1 || avctx->flags & AV_CODEC_FLAG_PASS2) && !avctx->bit_rate) {
218  av_log(avctx, AV_LOG_ERROR, "A bitrate must be set to use two pass mode.\n");
219  ret = AVERROR(EINVAL);
220  goto end;
221  }
222 
223  if (avctx->flags & AV_CODEC_FLAG_PASS2) {
224  if (!avctx->stats_in) {
225  av_log(avctx, AV_LOG_ERROR, "No stats file provided for second pass.\n");
226  ret = AVERROR(EINVAL);
227  goto end;
228  }
229 
230  ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;
231  ctx->pass_data = av_malloc(ctx->pass_size);
232  if (!ctx->pass_data) {
233  av_log(avctx, AV_LOG_ERROR, "Could not allocate stats buffer.\n");
234  ret = AVERROR(ENOMEM);
235  goto end;
236  }
237 
238  ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);
239  if (ctx->pass_size < 0) {
240  av_log(avctx, AV_LOG_ERROR, "Invalid pass file.\n");
241  ret = AVERROR(EINVAL);
242  goto end;
243  }
244  }
245 
246  if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
247  const AVBitStreamFilter *filter = av_bsf_get_by_name("extract_extradata");
248  int bret;
249 
250  if (!filter) {
251  av_log(avctx, AV_LOG_ERROR, "extract_extradata bitstream filter "
252  "not found. This is a bug, please report it.\n");
253  ret = AVERROR_BUG;
254  goto end;
255  }
256 
257  bret = av_bsf_alloc(filter, &ctx->bsf);
258  if (bret < 0) {
259  ret = bret;
260  goto end;
261  }
262 
263  bret = avcodec_parameters_from_context(ctx->bsf->par_in, avctx);
264  if (bret < 0) {
265  ret = bret;
266  goto end;
267  }
268 
269  bret = av_bsf_init(ctx->bsf);
270  if (bret < 0) {
271  ret = bret;
272  goto end;
273  }
274  }
275 
276  {
277  AVDictionaryEntry *en = NULL;
278  while ((en = av_dict_get(ctx->rav1e_opts, "", en, AV_DICT_IGNORE_SUFFIX))) {
279  int parse_ret = rav1e_config_parse(cfg, en->key, en->value);
280  if (parse_ret < 0)
281  av_log(avctx, AV_LOG_WARNING, "Invalid value for %s: %s.\n", en->key, en->value);
282  }
283  }
284 
285  rret = rav1e_config_parse_int(cfg, "width", avctx->width);
286  if (rret < 0) {
287  av_log(avctx, AV_LOG_ERROR, "Invalid width passed to rav1e.\n");
288  ret = AVERROR_INVALIDDATA;
289  goto end;
290  }
291 
292  rret = rav1e_config_parse_int(cfg, "height", avctx->height);
293  if (rret < 0) {
294  av_log(avctx, AV_LOG_ERROR, "Invalid height passed to rav1e.\n");
295  ret = AVERROR_INVALIDDATA;
296  goto end;
297  }
298 
299  rret = rav1e_config_parse_int(cfg, "threads", avctx->thread_count);
300  if (rret < 0)
301  av_log(avctx, AV_LOG_WARNING, "Invalid number of threads, defaulting to auto.\n");
302 
303  if (ctx->speed >= 0) {
304  rret = rav1e_config_parse_int(cfg, "speed", ctx->speed);
305  if (rret < 0) {
306  av_log(avctx, AV_LOG_ERROR, "Could not set speed preset.\n");
307  ret = AVERROR_EXTERNAL;
308  goto end;
309  }
310  }
311 
312  /* rav1e handles precedence between 'tiles' and cols/rows for us. */
313  if (ctx->tiles > 0) {
314  rret = rav1e_config_parse_int(cfg, "tiles", ctx->tiles);
315  if (rret < 0) {
316  av_log(avctx, AV_LOG_ERROR, "Could not set number of tiles to encode with.\n");
317  ret = AVERROR_EXTERNAL;
318  goto end;
319  }
320  }
321  if (ctx->tile_rows > 0) {
322  rret = rav1e_config_parse_int(cfg, "tile_rows", ctx->tile_rows);
323  if (rret < 0) {
324  av_log(avctx, AV_LOG_ERROR, "Could not set number of tile rows to encode with.\n");
325  ret = AVERROR_EXTERNAL;
326  goto end;
327  }
328  }
329  if (ctx->tile_cols > 0) {
330  rret = rav1e_config_parse_int(cfg, "tile_cols", ctx->tile_cols);
331  if (rret < 0) {
332  av_log(avctx, AV_LOG_ERROR, "Could not set number of tile cols to encode with.\n");
333  ret = AVERROR_EXTERNAL;
334  goto end;
335  }
336  }
337 
338  if (avctx->gop_size > 0) {
339  rret = rav1e_config_parse_int(cfg, "key_frame_interval", avctx->gop_size);
340  if (rret < 0) {
341  av_log(avctx, AV_LOG_ERROR, "Could not set max keyint.\n");
342  ret = AVERROR_EXTERNAL;
343  goto end;
344  }
345  }
346 
347  if (avctx->keyint_min > 0) {
348  rret = rav1e_config_parse_int(cfg, "min_key_frame_interval", avctx->keyint_min);
349  if (rret < 0) {
350  av_log(avctx, AV_LOG_ERROR, "Could not set min keyint.\n");
351  ret = AVERROR_EXTERNAL;
352  goto end;
353  }
354  }
355 
356  if (avctx->bit_rate && ctx->quantizer < 0) {
357  int max_quantizer = avctx->qmax >= 0 ? avctx->qmax : 255;
358 
359  rret = rav1e_config_parse_int(cfg, "quantizer", max_quantizer);
360  if (rret < 0) {
361  av_log(avctx, AV_LOG_ERROR, "Could not set max quantizer.\n");
362  ret = AVERROR_EXTERNAL;
363  goto end;
364  }
365 
366  if (avctx->qmin >= 0) {
367  rret = rav1e_config_parse_int(cfg, "min_quantizer", avctx->qmin);
368  if (rret < 0) {
369  av_log(avctx, AV_LOG_ERROR, "Could not set min quantizer.\n");
370  ret = AVERROR_EXTERNAL;
371  goto end;
372  }
373  }
374 
375  rret = rav1e_config_parse_int(cfg, "bitrate", avctx->bit_rate);
376  if (rret < 0) {
377  av_log(avctx, AV_LOG_ERROR, "Could not set bitrate.\n");
378  ret = AVERROR_EXTERNAL;
379  goto end;
380  }
381  } else if (ctx->quantizer >= 0) {
382  if (avctx->bit_rate)
383  av_log(avctx, AV_LOG_WARNING, "Both bitrate and quantizer specified. Using quantizer mode.");
384 
385  rret = rav1e_config_parse_int(cfg, "quantizer", ctx->quantizer);
386  if (rret < 0) {
387  av_log(avctx, AV_LOG_ERROR, "Could not set quantizer.\n");
388  ret = AVERROR_EXTERNAL;
389  goto end;
390  }
391  }
392 
393  rret = rav1e_config_set_pixel_format(cfg, desc->comp[0].depth,
394  pix_fmt_map(avctx->pix_fmt),
395  chroma_loc_map(avctx->chroma_sample_location),
396  range_map(avctx->pix_fmt, avctx->color_range));
397  if (rret < 0) {
398  av_log(avctx, AV_LOG_ERROR, "Failed to set pixel format properties.\n");
399  ret = AVERROR_INVALIDDATA;
400  goto end;
401  }
402 
403  /* rav1e's colorspace enums match standard values. */
404  rret = rav1e_config_set_color_description(cfg, (RaMatrixCoefficients) avctx->colorspace,
405  (RaColorPrimaries) avctx->color_primaries,
406  (RaTransferCharacteristics) avctx->color_trc);
407  if (rret < 0) {
408  av_log(avctx, AV_LOG_WARNING, "Failed to set color properties.\n");
409  if (avctx->err_recognition & AV_EF_EXPLODE) {
410  ret = AVERROR_INVALIDDATA;
411  goto end;
412  }
413  }
414 
415  ctx->ctx = rav1e_context_new(cfg);
416  if (!ctx->ctx) {
417  av_log(avctx, AV_LOG_ERROR, "Failed to create rav1e encode context.\n");
418  ret = AVERROR_EXTERNAL;
419  goto end;
420  }
421 
422  ret = 0;
423 
424 end:
425 
426  rav1e_config_unref(cfg);
427 
428  return ret;
429 }
430 
431 static int librav1e_receive_packet(AVCodecContext *avctx, AVPacket *pkt)
432 {
433  librav1eContext *ctx = avctx->priv_data;
434  RaFrame *rframe = ctx->rframe;
435  RaPacket *rpkt = NULL;
436  int ret;
437 
438  if (!rframe) {
439  AVFrame *frame = ctx->frame;
440 
441  ret = ff_encode_get_frame(avctx, frame);
442  if (ret < 0 && ret != AVERROR_EOF)
443  return ret;
444 
445  if (frame->buf[0]) {
446  const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
447 
448  int64_t *pts = av_malloc(sizeof(int64_t));
449  if (!pts) {
450  av_log(avctx, AV_LOG_ERROR, "Could not allocate PTS buffer.\n");
451  return AVERROR(ENOMEM);
452  }
453  *pts = frame->pts;
454 
455  rframe = rav1e_frame_new(ctx->ctx);
456  if (!rframe) {
457  av_log(avctx, AV_LOG_ERROR, "Could not allocate new rav1e frame.\n");
458  av_frame_unref(frame);
459  av_freep(&pts);
460  return AVERROR(ENOMEM);
461  }
462 
463  for (int i = 0; i < desc->nb_components; i++) {
464  int shift = i ? desc->log2_chroma_h : 0;
465  int bytes = desc->comp[0].depth == 8 ? 1 : 2;
466  rav1e_frame_fill_plane(rframe, i, frame->data[i],
467  (frame->height >> shift) * frame->linesize[i],
468  frame->linesize[i], bytes);
469  }
470  av_frame_unref(frame);
471  rav1e_frame_set_opaque(rframe, pts, av_free);
472  }
473  }
474 
475  ret = rav1e_send_frame(ctx->ctx, rframe);
476  if (rframe)
477  if (ret == RA_ENCODER_STATUS_ENOUGH_DATA) {
478  ctx->rframe = rframe; /* Queue is full. Store the RaFrame to retry next call */
479  } else {
480  rav1e_frame_unref(rframe); /* No need to unref if flushing. */
481  ctx->rframe = NULL;
482  }
483 
484  switch (ret) {
485  case RA_ENCODER_STATUS_SUCCESS:
486  case RA_ENCODER_STATUS_ENOUGH_DATA:
487  break;
488  case RA_ENCODER_STATUS_FAILURE:
489  av_log(avctx, AV_LOG_ERROR, "Could not send frame: %s\n", rav1e_status_to_str(ret));
490  return AVERROR_EXTERNAL;
491  default:
492  av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_send_frame: %s\n", ret, rav1e_status_to_str(ret));
493  return AVERROR_UNKNOWN;
494  }
495 
496 retry:
497 
498  if (avctx->flags & AV_CODEC_FLAG_PASS1) {
499  int sret = get_stats(avctx, 0);
500  if (sret < 0)
501  return sret;
502  } else if (avctx->flags & AV_CODEC_FLAG_PASS2) {
503  int sret = set_stats(avctx);
504  if (sret < 0)
505  return sret;
506  }
507 
508  ret = rav1e_receive_packet(ctx->ctx, &rpkt);
509  switch (ret) {
510  case RA_ENCODER_STATUS_SUCCESS:
511  break;
512  case RA_ENCODER_STATUS_LIMIT_REACHED:
513  if (avctx->flags & AV_CODEC_FLAG_PASS1) {
514  int sret = get_stats(avctx, 1);
515  if (sret < 0)
516  return sret;
517  }
518  return AVERROR_EOF;
519  case RA_ENCODER_STATUS_ENCODED:
520  goto retry;
521  case RA_ENCODER_STATUS_NEED_MORE_DATA:
522  if (avctx->internal->draining) {
523  av_log(avctx, AV_LOG_ERROR, "Unexpected error when receiving packet after EOF.\n");
524  return AVERROR_EXTERNAL;
525  }
526  return AVERROR(EAGAIN);
527  case RA_ENCODER_STATUS_FAILURE:
528  av_log(avctx, AV_LOG_ERROR, "Could not encode frame: %s\n", rav1e_status_to_str(ret));
529  return AVERROR_EXTERNAL;
530  default:
531  av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_receive_packet: %s\n", ret, rav1e_status_to_str(ret));
532  return AVERROR_UNKNOWN;
533  }
534 
535  ret = ff_get_encode_buffer(avctx, pkt, rpkt->len, 0);
536  if (ret < 0) {
537  av_log(avctx, AV_LOG_ERROR, "Could not allocate packet.\n");
538  rav1e_packet_unref(rpkt);
539  return ret;
540  }
541 
542  memcpy(pkt->data, rpkt->data, rpkt->len);
543 
544  if (rpkt->frame_type == RA_FRAME_TYPE_KEY)
545  pkt->flags |= AV_PKT_FLAG_KEY;
546 
547  pkt->pts = pkt->dts = *((int64_t *) rpkt->opaque);
548  av_free(rpkt->opaque);
549  rav1e_packet_unref(rpkt);
550 
551  if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
552  int ret = av_bsf_send_packet(ctx->bsf, pkt);
553  if (ret < 0) {
554  av_log(avctx, AV_LOG_ERROR, "extradata extraction send failed.\n");
555  av_packet_unref(pkt);
556  return ret;
557  }
558 
559  ret = av_bsf_receive_packet(ctx->bsf, pkt);
560  if (ret < 0) {
561  av_log(avctx, AV_LOG_ERROR, "extradata extraction receive failed.\n");
562  av_packet_unref(pkt);
563  return ret;
564  }
565  }
566 
567  return 0;
568 }
569 
570 #define OFFSET(x) offsetof(librav1eContext, x)
571 #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
572 
573 static const AVOption options[] = {
574  { "qp", "use constant quantizer mode", OFFSET(quantizer), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 255, VE },
575  { "speed", "what speed preset to use", OFFSET(speed), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 10, VE },
576  { "tiles", "number of tiles to encode with", OFFSET(tiles), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
577  { "tile-rows", "number of tile rows to encode with", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
578  { "tile-columns", "number of tile columns to encode with", OFFSET(tile_cols), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
579  { "rav1e-params", "set the rav1e configuration using a :-separated list of key=value parameters", OFFSET(rav1e_opts), AV_OPT_TYPE_DICT, { 0 }, 0, 0, VE },
580  { NULL }
581 };
582 
583 static const AVCodecDefault librav1e_defaults[] = {
584  { "b", "0" },
585  { "g", "0" },
586  { "keyint_min", "0" },
587  { "qmax", "-1" },
588  { "qmin", "-1" },
589  { NULL }
590 };
591 
592 enum AVPixelFormat librav1e_pix_fmts[] = {
593  AV_PIX_FMT_YUV420P,
594  AV_PIX_FMT_YUVJ420P,
595  AV_PIX_FMT_YUV420P10,
596  AV_PIX_FMT_YUV420P12,
597  AV_PIX_FMT_YUV422P,
598  AV_PIX_FMT_YUVJ422P,
599  AV_PIX_FMT_YUV422P10,
600  AV_PIX_FMT_YUV422P12,
601  AV_PIX_FMT_YUV444P,
602  AV_PIX_FMT_YUVJ444P,
603  AV_PIX_FMT_YUV444P10,
604  AV_PIX_FMT_YUV444P12,
605  AV_PIX_FMT_NONE
606 };
607 
608 static const AVClass class = {
609  .class_name = "librav1e",
610  .item_name = av_default_item_name,
611  .option = options,
612  .version = LIBAVUTIL_VERSION_INT,
613 };
614 
616  .name = "librav1e",
617  .long_name = NULL_IF_CONFIG_SMALL("librav1e AV1"),
618  .type = AVMEDIA_TYPE_VIDEO,
619  .id = AV_CODEC_ID_AV1,
620  .init = librav1e_encode_init,
621  .receive_packet = librav1e_receive_packet,
622  .close = librav1e_encode_close,
623  .priv_data_size = sizeof(librav1eContext),
624  .priv_class = &class,
625  .defaults = librav1e_defaults,
626  .pix_fmts = librav1e_pix_fmts,
627  .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_DR1 |
628  AV_CODEC_CAP_OTHER_THREADS,
629  .caps_internal = FF_CODEC_CAP_AUTO_THREADS | FF_CODEC_CAP_INIT_CLEANUP,
630  .wrapper_name = "librav1e",
631 };
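
For orientation only (this block is not part of librav1e.c): the sketch below shows how the private options declared in the options[] table above ("qp", "speed", "rav1e-params") might be set through the ordinary libavcodec API. The helper name encode_with_librav1e, the 1/25 time base, and the "low_latency=true" rav1e parameter string are illustrative assumptions, and most error handling is omitted.

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

/* Illustrative sketch: open the "librav1e" encoder and set its private options.
 * "frame" is assumed to be an AVFrame already filled by the caller and
 * "pkt" an AVPacket obtained from av_packet_alloc(). */
static int encode_with_librav1e(AVFrame *frame, AVPacket *pkt)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("librav1e");
    AVCodecContext *enc;
    int ret;

    if (!codec)
        return AVERROR_ENCODER_NOT_FOUND;

    enc = avcodec_alloc_context3(codec);
    if (!enc)
        return AVERROR(ENOMEM);

    enc->width     = frame->width;
    enc->height    = frame->height;
    enc->pix_fmt   = (enum AVPixelFormat)frame->format;
    enc->time_base = (AVRational){ 1, 25 };                 /* assumed frame rate */

    /* Private options from the options[] table above. */
    av_opt_set_int(enc->priv_data, "speed", 6, 0);           /* rav1e speed preset */
    av_opt_set_int(enc->priv_data, "qp", 100, 0);            /* constant-quantizer mode */
    av_opt_set(enc->priv_data, "rav1e-params", "low_latency=true", 0); /* assumed rav1e key */

    ret = avcodec_open2(enc, codec, NULL);                   /* runs librav1e_encode_init() */
    if (ret < 0)
        goto fail;

    ret = avcodec_send_frame(enc, frame);
    if (ret >= 0)
        ret = avcodec_receive_packet(enc, pkt);              /* may be AVERROR(EAGAIN): rav1e buffers frames */

fail:
    avcodec_free_context(&enc);
    return ret;
}

On the command line the same private options would be passed to the encoder as something like: ffmpeg -i in.mkv -c:v librav1e -speed 6 -qp 100 -rav1e-params low_latency=true out.ivf.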