FFmpeg
mediacodecdec_common.c
/*
 * Android MediaCodec decoder
 *
 * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include "libavutil/common.h"
#include "libavutil/hwcontext_mediacodec.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"
#include "libavutil/timestamp.h"

#include "avcodec.h"
#include "decode.h"

#include "mediacodec.h"
#include "mediacodec_surface.h"
#include "mediacodec_sw_buffer.h"
#include "mediacodec_wrapper.h"
#include "mediacodecdec_common.h"

/**
 * OMX.k3.video.decoder.avc, OMX.NVIDIA.* OMX.SEC.avc.dec and OMX.google
 * codec workarounds used in various places are taken from the Gstreamer
 * project.
 *
 * Gstreamer references:
 * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
 *
 * Gstreamer copyright notice:
 *
 * Copyright (C) 2012, Collabora Ltd.
 *   Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
 *
 * Copyright (C) 2012, Rafaël Carré <funman@videolanorg>
 *
 * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
 *
 * Copyright (C) 2014-2015, Collabora Ltd.
 *   Author: Matthieu Bouron <matthieu.bouron@collabora.com>
 *
 * Copyright (C) 2015, Edward Hervey
 *   Author: Edward Hervey <bilboed@gmail.com>
 *
 * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#define INPUT_DEQUEUE_TIMEOUT_US 8000
#define OUTPUT_DEQUEUE_TIMEOUT_US 8000
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000

enum {
    COLOR_RANGE_FULL    = 0x1,
    COLOR_RANGE_LIMITED = 0x2,
};

static enum AVColorRange mcdec_get_color_range(int color_range)
{
    switch (color_range) {
    case COLOR_RANGE_FULL:
        return AVCOL_RANGE_JPEG;
    case COLOR_RANGE_LIMITED:
        return AVCOL_RANGE_MPEG;
    default:
        return AVCOL_RANGE_UNSPECIFIED;
    }
}

enum {
    COLOR_STANDARD_BT709      = 0x1,
    COLOR_STANDARD_BT601_PAL  = 0x2,
    COLOR_STANDARD_BT601_NTSC = 0x4,
    COLOR_STANDARD_BT2020     = 0x6,
};

static enum AVColorSpace mcdec_get_color_space(int color_standard)
{
    switch (color_standard) {
    case COLOR_STANDARD_BT709:
        return AVCOL_SPC_BT709;
    case COLOR_STANDARD_BT601_PAL:
        return AVCOL_SPC_BT470BG;
    case COLOR_STANDARD_BT601_NTSC:
        return AVCOL_SPC_SMPTE170M;
    case COLOR_STANDARD_BT2020:
        return AVCOL_SPC_BT2020_NCL;
    default:
        return AVCOL_SPC_UNSPECIFIED;
    }
}

static enum AVColorPrimaries mcdec_get_color_pri(int color_standard)
{
    switch (color_standard) {
    case COLOR_STANDARD_BT709:
        return AVCOL_PRI_BT709;
    case COLOR_STANDARD_BT601_PAL:
        return AVCOL_PRI_BT470BG;
    case COLOR_STANDARD_BT601_NTSC:
        return AVCOL_PRI_SMPTE170M;
    case COLOR_STANDARD_BT2020:
        return AVCOL_PRI_BT2020;
    default:
        return AVCOL_PRI_UNSPECIFIED;
    }
}

enum {
    COLOR_TRANSFER_LINEAR    = 0x1,
    COLOR_TRANSFER_SDR_VIDEO = 0x3,
    COLOR_TRANSFER_ST2084    = 0x6,
    COLOR_TRANSFER_HLG       = 0x7,
};

static enum AVColorTransferCharacteristic mcdec_get_color_trc(int color_transfer)
{
    switch (color_transfer) {
    case COLOR_TRANSFER_LINEAR:
        return AVCOL_TRC_LINEAR;
    case COLOR_TRANSFER_SDR_VIDEO:
        return AVCOL_TRC_SMPTE170M;
    case COLOR_TRANSFER_ST2084:
        return AVCOL_TRC_SMPTEST2084;
    case COLOR_TRANSFER_HLG:
        return AVCOL_TRC_ARIB_STD_B67;
    default:
        return AVCOL_TRC_UNSPECIFIED;
    }
}

enum {
    COLOR_FormatYUV420Planar                              = 0x13,
    COLOR_FormatYUV420SemiPlanar                          = 0x15,
    COLOR_FormatYCbYCr                                    = 0x19,
    COLOR_FormatAndroidOpaque                             = 0x7F000789,
    COLOR_QCOM_FormatYUV420SemiPlanar                     = 0x7fa30c00,
    COLOR_QCOM_FormatYUV420SemiPlanar32m                  = 0x7fa30c04,
    COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03,
    COLOR_TI_FormatYUV420PackedSemiPlanar                 = 0x7f000100,
    COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced       = 0x7f000001,
};

static const struct {

    int color_format;
    enum AVPixelFormat pix_fmt;

} color_formats[] = {

    { COLOR_FormatYUV420Planar,                              AV_PIX_FMT_YUV420P },
    { COLOR_FormatYUV420SemiPlanar,                          AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420SemiPlanar,                     AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420SemiPlanar32m,                  AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12    },
    { COLOR_TI_FormatYUV420PackedSemiPlanar,                 AV_PIX_FMT_NV12    },
    { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced,       AV_PIX_FMT_NV12    },
    { 0 }
};

static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx,
                                                 MediaCodecDecContext *s,
                                                 int color_format)
{
    int i;
    enum AVPixelFormat ret = AV_PIX_FMT_NONE;

    if (s->surface) {
        return AV_PIX_FMT_MEDIACODEC;
    }

    if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
        s->color_format = color_format = COLOR_FormatYUV420SemiPlanar;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
        if (color_formats[i].color_format == color_format) {
            return color_formats[i].pix_fmt;
        }
    }

    av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
           color_format, color_format);

    return ret;
}

static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
{
    atomic_fetch_add(&s->refcount, 1);
}

static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
{
    if (!s)
        return;

    if (atomic_fetch_sub(&s->refcount, 1) == 1) {
        if (s->codec) {
            ff_AMediaCodec_delete(s->codec);
            s->codec = NULL;
        }

        if (s->format) {
            ff_AMediaFormat_delete(s->format);
            s->format = NULL;
        }

        if (s->surface) {
            ff_mediacodec_surface_unref(s->surface, NULL);
            s->surface = NULL;
        }

        av_freep(&s->codec_name);
        av_freep(&s);
    }
}

static void mediacodec_buffer_release(void *opaque, uint8_t *data)
{
    AVMediaCodecBuffer *buffer = opaque;
    MediaCodecDecContext *ctx = buffer->ctx;
    int released = atomic_load(&buffer->released);

    if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
        atomic_fetch_sub(&ctx->hw_buffer_count, 1);
        av_log(ctx->avctx, AV_LOG_DEBUG,
               "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
               buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
        ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
    }

    ff_mediacodec_dec_unref(ctx);
    av_freep(&buffer);
}

static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx,
                                     MediaCodecDecContext *s,
                                     ssize_t index,
                                     FFAMediaCodecBufferInfo *info,
                                     AVFrame *frame)
{
    int ret = 0;
    int status = 0;
    AVMediaCodecBuffer *buffer = NULL;

    frame->buf[0] = NULL;
    frame->width = avctx->width;
    frame->height = avctx->height;
    frame->format = avctx->pix_fmt;
    frame->sample_aspect_ratio = avctx->sample_aspect_ratio;

    if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
        frame->pts = av_rescale_q(info->presentationTimeUs,
                                  AV_TIME_BASE_Q,
                                  avctx->pkt_timebase);
    } else {
        frame->pts = info->presentationTimeUs;
    }
    frame->pkt_dts = AV_NOPTS_VALUE;
    frame->color_range = avctx->color_range;
    frame->color_primaries = avctx->color_primaries;
    frame->color_trc = avctx->color_trc;
    frame->colorspace = avctx->colorspace;

    buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
    if (!buffer) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    atomic_init(&buffer->released, 0);

    frame->buf[0] = av_buffer_create(NULL,
                                     0,
                                     mediacodec_buffer_release,
                                     buffer,
                                     AV_BUFFER_FLAG_READONLY);
    if (!frame->buf[0]) {
        ret = AVERROR(ENOMEM);
        goto fail;

    }

    buffer->ctx = s;
    buffer->serial = atomic_load(&s->serial);
    ff_mediacodec_dec_ref(s);

    buffer->index = index;
    buffer->pts = info->presentationTimeUs;

    frame->data[3] = (uint8_t *)buffer;

    atomic_fetch_add(&s->hw_buffer_count, 1);
    av_log(avctx, AV_LOG_DEBUG,
            "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
            buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));

    return 0;
fail:
    av_freep(&buffer);
    status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
        ret = AVERROR_EXTERNAL;
    }

    return ret;
}

static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx,
                                     MediaCodecDecContext *s,
                                     uint8_t *data,
                                     size_t size,
                                     ssize_t index,
                                     FFAMediaCodecBufferInfo *info,
                                     AVFrame *frame)
{
    int ret = 0;
    int status = 0;

    frame->width = avctx->width;
    frame->height = avctx->height;
    frame->format = avctx->pix_fmt;

    /* MediaCodec buffers need to be copied to our own refcounted buffers
     * because the flush command invalidates all input and output buffers.
     */
    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
        goto done;
    }

    /* Override frame->pkt_pts as ff_get_buffer will override its value based
     * on the last avpacket received which is not in sync with the frame:
     *   * N avpackets can be pushed before 1 frame is actually returned
     *   * 0-sized avpackets are pushed to flush remaining frames at EOS */
    if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
        frame->pts = av_rescale_q(info->presentationTimeUs,
                                  AV_TIME_BASE_Q,
                                  avctx->pkt_timebase);
    } else {
        frame->pts = info->presentationTimeUs;
    }
    frame->pkt_dts = AV_NOPTS_VALUE;

    av_log(avctx, AV_LOG_TRACE,
            "Frame: width=%d stride=%d height=%d slice-height=%d "
            "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
            "destination linesizes=%d,%d,%d\n" ,
            avctx->width, s->stride, avctx->height, s->slice_height,
            s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
            frame->linesize[0], frame->linesize[1], frame->linesize[2]);

    switch (s->color_format) {
    case COLOR_FormatYUV420Planar:
        ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar32m:
        ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_TI_FormatYUV420PackedSemiPlanar:
    case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced:
        ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:
        ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data, size, info, frame);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
            s->color_format, s->color_format);
        ret = AVERROR(EINVAL);
        goto done;
    }

    ret = 0;
done:
    status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
        ret = AVERROR_EXTERNAL;
    }

    return ret;
}

#define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do {                              \
    int32_t value = 0;                                                                 \
    if (ff_AMediaFormat_getInt32(s->format, key, &value)) {                            \
        (name) = value;                                                                \
    } else if (mandatory) {                                                            \
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
        ret = AVERROR_EXTERNAL;                                                        \
        goto fail;                                                                     \
    }                                                                                  \
} while (0)                                                                            \

static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    int ret = 0;
    int width = 0;
    int height = 0;
    int color_range = 0;
    int color_standard = 0;
    int color_transfer = 0;
    char *format = NULL;

    if (!s->format) {
        av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
        return AVERROR(EINVAL);
    }

    format = ff_AMediaFormat_toString(s->format);
    if (!format) {
        return AVERROR_EXTERNAL;
    }
    av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);

    /* Mandatory fields */
    AMEDIAFORMAT_GET_INT32(s->width,  "width", 1);
    AMEDIAFORMAT_GET_INT32(s->height, "height", 1);

    AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
    s->stride = s->stride > 0 ? s->stride : s->width;

    AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);

    if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
        s->slice_height = FFALIGN(s->height, 16);
    } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
        s->slice_height = avctx->height;
        s->stride = avctx->width;
    } else if (s->slice_height == 0) {
        s->slice_height = s->height;
    }

    AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
    avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }

    /* Optional fields */
    AMEDIAFORMAT_GET_INT32(s->crop_top,    "crop-top",    0);
    AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
    AMEDIAFORMAT_GET_INT32(s->crop_left,   "crop-left",   0);
    AMEDIAFORMAT_GET_INT32(s->crop_right,  "crop-right",  0);

    // Try "crop" for NDK
    if (!(s->crop_right && s->crop_bottom) && s->use_ndk_codec)
        ff_AMediaFormat_getRect(s->format, "crop", &s->crop_left, &s->crop_top, &s->crop_right, &s->crop_bottom);

    if (s->crop_right && s->crop_bottom) {
        width = s->crop_right + 1 - s->crop_left;
        height = s->crop_bottom + 1 - s->crop_top;
    } else {
        /* TODO: NDK MediaFormat should try getRect() first.
         * Try crop-width/crop-height, it works on NVIDIA Shield.
         */
        AMEDIAFORMAT_GET_INT32(width,  "crop-width",  0);
        AMEDIAFORMAT_GET_INT32(height, "crop-height", 0);
    }
    if (!width || !height) {
        width = s->width;
        height = s->height;
    }

    AMEDIAFORMAT_GET_INT32(s->display_width,  "display-width",  0);
    AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);

    if (s->display_width && s->display_height) {
        AVRational sar = av_div_q(
            (AVRational){ s->display_width, s->display_height },
            (AVRational){ width, height });
        ff_set_sar(avctx, sar);
    }

    AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
    if (color_range)
        avctx->color_range = mcdec_get_color_range(color_range);

    AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
    if (color_standard) {
        avctx->colorspace = mcdec_get_color_space(color_standard);
        avctx->color_primaries = mcdec_get_color_pri(color_standard);
    }

    AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
    if (color_transfer)
        avctx->color_trc = mcdec_get_color_trc(color_transfer);

    av_log(avctx, AV_LOG_INFO,
        "Output crop parameters top=%d bottom=%d left=%d right=%d, "
        "resulting dimensions width=%d height=%d\n",
        s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
        width, height);

    av_freep(&format);
    return ff_set_dimensions(avctx, width, height);
fail:
    av_freep(&format);
    return ret;
}

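/*
 * Illustrative note (not part of the original file): the crop rectangle
 * reported by MediaCodec is inclusive, and the display-width/height pair,
 * when present, carries the intended display size. A hypothetical 1920x1088
 * coded surface cropped to 1920x1080 with anamorphic display metadata of
 * 2560x1080 would resolve as follows:
 *
 *   width  = crop_right  + 1 - crop_left = 1919 + 1 - 0 = 1920
 *   height = crop_bottom + 1 - crop_top  = 1079 + 1 - 0 = 1080
 *   sar    = (display_width / display_height) / (width / height)
 *          = (2560 / 1080) / (1920 / 1080) = 4/3
 *
 * which matches the av_div_q() call in mediacodec_dec_parse_format() above.
 */
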
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    FFAMediaCodec *codec = s->codec;
    int status;

    s->output_buffer_count = 0;

    s->draining = 0;
    s->flushing = 0;
    s->eos = 0;
    atomic_fetch_add(&s->serial, 1);
    atomic_init(&s->hw_buffer_count, 0);
    s->current_input_buffer = -1;

    status = ff_AMediaCodec_flush(codec);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
        return AVERROR_EXTERNAL;
    }

    return 0;
}

int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s,
                           const char *mime, FFAMediaFormat *format)
{
    int ret = 0;
    int status;
    int profile;

    enum AVPixelFormat pix_fmt;
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_MEDIACODEC,
        AV_PIX_FMT_NONE,
    };

    s->avctx = avctx;
    atomic_init(&s->refcount, 1);
    atomic_init(&s->hw_buffer_count, 0);
    atomic_init(&s->serial, 1);
    s->current_input_buffer = -1;

    pix_fmt = ff_get_format(avctx, pix_fmts);
    if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
        AVMediaCodecContext *user_ctx = avctx->hwaccel_context;

        if (avctx->hw_device_ctx) {
            AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
            if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
                if (device_ctx->hwctx) {
                    AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
                    s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, mediacodec_ctx->native_window, avctx);
                    av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
                }
            }
        }

        if (!s->surface && user_ctx && user_ctx->surface) {
            s->surface = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx);
            av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
        }
    }

    profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx);
    if (profile < 0) {
        av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
    }

    s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
    if (!s->codec_name) {
        // getCodecNameByType() can fail due to missing JVM, while NDK
        // mediacodec can be used without JVM.
        if (!s->use_ndk_codec) {
            ret = AVERROR_EXTERNAL;
            goto fail;
        }
        av_log(avctx, AV_LOG_INFO, "Failed to getCodecNameByType\n");
    } else {
        av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
    }

    if (s->codec_name)
        s->codec = ff_AMediaCodec_createCodecByName(s->codec_name, s->use_ndk_codec);
    else {
        s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
        if (s->codec) {
            s->codec_name = ff_AMediaCodec_getName(s->codec);
            if (!s->codec_name)
                s->codec_name = av_strdup(mime);
        }
    }
    if (!s->codec) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
    if (status < 0) {
        char *desc = ff_AMediaFormat_toString(format);
        av_log(avctx, AV_LOG_ERROR,
               "Failed to configure codec %s (status = %d) with format %s\n",
               s->codec_name, status, desc);
        av_freep(&desc);

        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    status = ff_AMediaCodec_start(s->codec);
    if (status < 0) {
        char *desc = ff_AMediaFormat_toString(format);
        av_log(avctx, AV_LOG_ERROR,
               "Failed to start codec %s (status = %d) with format %s\n",
               s->codec_name, status, desc);
        av_freep(&desc);
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    s->format = ff_AMediaCodec_getOutputFormat(s->codec);
    if (s->format) {
        if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
            av_log(avctx, AV_LOG_ERROR,
                "Failed to configure context\n");
            goto fail;
        }
    }

    av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);

    return 0;

fail:
    av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
    ff_mediacodec_dec_close(avctx, s);
    return ret;
}

int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s,
                           AVPacket *pkt, bool wait)
{
    int offset = 0;
    int need_draining = 0;
    uint8_t *data;
    size_t size;
    FFAMediaCodec *codec = s->codec;
    int status;
    int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
    int64_t pts;

    if (s->flushing) {
        av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
                                    "until all output buffers have been released\n");
        return AVERROR_EXTERNAL;
    }

    if (pkt->size == 0) {
        need_draining = 1;
    }

    if (s->draining && s->eos) {
        return AVERROR_EOF;
    }

    while (offset < pkt->size || (need_draining && !s->draining)) {
        ssize_t index = s->current_input_buffer;
        if (index < 0) {
            index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
            if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
                av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
                break;
            }

            if (index < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
                return AVERROR_EXTERNAL;
            }
        }
        s->current_input_buffer = -1;

        data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
        if (!data) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
            return AVERROR_EXTERNAL;
        }

        pts = pkt->pts;
        if (pts == AV_NOPTS_VALUE) {
            av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
            pts = 0;
        }
        if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
            pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
        }

        if (need_draining) {
            uint32_t flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);

            av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");

            status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
                return AVERROR_EXTERNAL;
            }

            av_log(avctx, AV_LOG_TRACE,
                   "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);

            s->draining = 1;
            return 0;
        }

        size = FFMIN(pkt->size - offset, size);
        memcpy(data, pkt->data + offset, size);
        offset += size;

        status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
        if (status < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
            return AVERROR_EXTERNAL;
        }

        av_log(avctx, AV_LOG_TRACE,
               "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
    }

    if (offset == 0)
        return AVERROR(EAGAIN);
    return offset;
}

779  AVFrame *frame, bool wait)
780 {
781  int ret;
782  uint8_t *data;
783  ssize_t index;
784  size_t size;
785  FFAMediaCodec *codec = s->codec;
787  int status;
788  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
789 
790  if (s->draining && s->eos) {
791  return AVERROR_EOF;
792  }
793 
794  if (s->draining) {
795  /* If the codec is flushing or need to be flushed, block for a fair
796  * amount of time to ensure we got a frame */
797  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
798  } else if (s->output_buffer_count == 0 || !wait) {
799  /* If the codec hasn't produced any frames, do not block so we
800  * can push data to it as fast as possible, and get the first
801  * frame */
802  output_dequeue_timeout_us = 0;
803  }
804 
805  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
806  if (index >= 0) {
807  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
808  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
809  " flags=%" PRIu32 "\n", index, info.offset, info.size,
810  info.presentationTimeUs, info.flags);
811 
812  if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
813  s->eos = 1;
814  }
815 
816  if (info.size) {
817  if (s->surface) {
818  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
819  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
820  return ret;
821  }
822  } else {
824  if (!data) {
825  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
826  return AVERROR_EXTERNAL;
827  }
828 
829  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
830  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
831  return ret;
832  }
833  }
834 
835  s->output_buffer_count++;
836  return 0;
837  } else {
839  if (status < 0) {
840  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
841  }
842  }
843 
844  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
845  char *format = NULL;
846 
847  if (s->format) {
848  status = ff_AMediaFormat_delete(s->format);
849  if (status < 0) {
850  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
851  }
852  }
853 
854  s->format = ff_AMediaCodec_getOutputFormat(codec);
855  if (!s->format) {
856  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
857  return AVERROR_EXTERNAL;
858  }
859 
860  format = ff_AMediaFormat_toString(s->format);
861  if (!format) {
862  return AVERROR_EXTERNAL;
863  }
864  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
865  av_freep(&format);
866 
867  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
868  return ret;
869  }
870 
871  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
873  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
874  if (s->draining) {
875  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
876  "while draining remaining frames, output will probably lack frames\n",
877  output_dequeue_timeout_us / 1000);
878  } else {
879  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
880  }
881  } else {
882  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
883  return AVERROR_EXTERNAL;
884  }
885 
886  return AVERROR(EAGAIN);
887 }
888 
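/*
 * Illustrative sketch (not part of the original file): a caller such as the
 * wrappers in mediacodecdec.c is expected to alternate between
 * ff_mediacodec_dec_send() and ff_mediacodec_dec_receive(), treating
 * AVERROR(EAGAIN) from receive as "no frame yet, feed more input or keep
 * draining". A minimal, hypothetical driving step:
 *
 *   // assumes avctx, s, pkt and frame are already set up by the caller
 *   int ret = ff_mediacodec_dec_send(avctx, s, pkt, true);
 *   if (ret >= 0 || ret == AVERROR(EAGAIN))
 *       ret = ff_mediacodec_dec_receive(avctx, s, frame, false);
 *
 * A real caller also has to handle partial sends (a positive return value is
 * the number of consumed bytes) and AVERROR_EOF once draining has finished.
 */
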
/*
* ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
* the codec (because the user retains frames). The codec stays in the
* flushing state.
*
* ff_mediacodec_dec_flush returns 1 if the flush can actually be
* performed on the codec. The codec leaves the flushing state and can
* process packets again.
*
* ff_mediacodec_dec_flush returns a negative value if an error has
* occurred.
*/
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    if (!s->surface || !s->delay_flush || atomic_load(&s->refcount) == 1) {
        int ret;

        /* No frames (holding a reference to the codec) are retained by the
         * user, thus we can flush the codec and return accordingly */
        if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
            return ret;
        }

        return 1;
    }

    s->flushing = 1;
    return 0;
}

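/*
 * Illustrative sketch (not part of the original file): how a caller might act
 * on the tri-state return value documented above.
 *
 *   int ret = ff_mediacodec_dec_flush(avctx, s);
 *   if (ret < 0) {
 *       // hard failure while flushing the underlying codec
 *   } else if (ret == 0) {
 *       // frames are still retained by the user: the context stays in the
 *       // flushing state and rejects new packets until buffers come back
 *   } else {
 *       // ret == 1: the codec was flushed and can accept packets again
 *   }
 */
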
int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    ff_mediacodec_dec_unref(s);

    return 0;
}

int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    return s->flushing;
}