FFmpeg
mediacodecdec_common.c
Go to the documentation of this file.
1 /*
2  * Android MediaCodec decoder
3  *
4  * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <string.h>
24 #include <sys/types.h>
25 
26 #include "libavutil/avassert.h"
27 #include "libavutil/common.h"
29 #include "libavutil/mem.h"
30 #include "libavutil/log.h"
31 #include "libavutil/pixfmt.h"
32 #include "libavutil/time.h"
33 #include "libavutil/timestamp.h"
35 
36 #include "avcodec.h"
37 #include "decode.h"
38 
39 #include "mediacodec.h"
40 #include "mediacodec_surface.h"
41 #include "mediacodec_sw_buffer.h"
42 #include "mediacodec_wrapper.h"
43 #include "mediacodecdec_common.h"
44 
45 /**
46  * OMX.k3.video.decoder.avc, OMX.NVIDIA.* OMX.SEC.avc.dec and OMX.google
47  * codec workarounds used in various place are taken from the Gstreamer
48  * project.
49  *
50  * Gstreamer references:
51  * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
52  *
53  * Gstreamer copyright notice:
54  *
55  * Copyright (C) 2012, Collabora Ltd.
56  * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
57  *
58  * Copyright (C) 2012, Rafaël Carré <funman@videolanorg>
59  *
60  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
61  *
62  * Copyright (C) 2014-2015, Collabora Ltd.
63  * Author: Matthieu Bouron <matthieu.bouron@gcollabora.com>
64  *
65  * Copyright (C) 2015, Edward Hervey
66  * Author: Edward Hervey <bilboed@gmail.com>
67  *
68  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
69  *
70  * This library is free software; you can redistribute it and/or
71  * modify it under the terms of the GNU Lesser General Public
72  * License as published by the Free Software Foundation
73  * version 2.1 of the License.
74  *
75  * This library is distributed in the hope that it will be useful,
76  * but WITHOUT ANY WARRANTY; without even the implied warranty of
77  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
78  * Lesser General Public License for more details.
79  *
80  * You should have received a copy of the GNU Lesser General Public
81  * License along with this library; if not, write to the Free Software
82  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
83  *
84  */
85 
86 #define INPUT_DEQUEUE_TIMEOUT_US 8000
87 #define OUTPUT_DEQUEUE_TIMEOUT_US 8000
88 #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000
89 
90 enum {
91  ENCODING_PCM_16BIT = 0x00000002,
92  ENCODING_PCM_8BIT = 0x00000003,
93  ENCODING_PCM_FLOAT = 0x00000004,
95  ENCODING_PCM_32BIT = 0x00000016,
96 };
97 
98 static const struct {
99 
102 
103 } sample_formats[] = {
104 
109  { 0 }
110 };
111 
114  int pcm_format)
115 {
117 
118  for (int i = 0; i < FF_ARRAY_ELEMS(sample_formats); i++) {
120  return sample_formats[i].sample_format;
121  }
122  }
123 
124  av_log(avctx, AV_LOG_ERROR, "Output sample format 0x%x (value=%d) is not supported\n",
126 
127  return ret;
128 }
129 
130 enum
131 {
150 };
151 
152 static const struct {
153 
154  int mask;
155  uint64_t layout;
156 
157 } channel_masks[] = {
176 };
177 
178 static uint64_t mcdec_map_channel_mask(AVCodecContext *avctx,
179  int channel_mask)
180 {
181  uint64_t channel_layout = 0;
182 
183  for (int i = 0; i < FF_ARRAY_ELEMS(channel_masks); i++) {
184  if (channel_mask & channel_masks[i].mask)
185  channel_layout |= channel_masks[i].layout;
186  }
187 
188  return channel_layout;
189 }
190 
191 enum {
201 };
202 
203 static const struct {
204 
207 
208 } color_formats[] = {
209 
217  { 0 }
218 };
219 
222  int color_format)
223 {
224  int i;
226 
227  if (s->surface) {
228  return AV_PIX_FMT_MEDIACODEC;
229  }
230 
231  if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
233  }
234 
235  for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
237  return color_formats[i].pix_fmt;
238  }
239  }
240 
241  av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
243 
244  return ret;
245 }
246 
248 {
249  atomic_fetch_add(&s->refcount, 1);
250 }
251 
253 {
254  if (!s)
255  return;
256 
257  if (atomic_fetch_sub(&s->refcount, 1) == 1) {
258  if (s->codec) {
259  ff_AMediaCodec_delete(s->codec);
260  s->codec = NULL;
261  }
262 
263  if (s->format) {
264  ff_AMediaFormat_delete(s->format);
265  s->format = NULL;
266  }
267 
268  if (s->surface) {
270  s->surface = NULL;
271  }
272 
273  av_freep(&s->codec_name);
274  av_freep(&s);
275  }
276 }
277 
278 static void mediacodec_buffer_release(void *opaque, uint8_t *data)
279 {
280  AVMediaCodecBuffer *buffer = opaque;
282  int released = atomic_load(&buffer->released);
283 
284  if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
285  atomic_fetch_sub(&ctx->hw_buffer_count, 1);
286  av_log(ctx->avctx, AV_LOG_DEBUG,
287  "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
288  buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
289  ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
290  }
291 
293  av_freep(&buffer);
294 }
295 
298  ssize_t index,
300  AVFrame *frame)
301 {
302  int ret = 0;
303  int status = 0;
304  AVMediaCodecBuffer *buffer = NULL;
305 
306  frame->buf[0] = NULL;
307  frame->width = avctx->width;
308  frame->height = avctx->height;
309  frame->format = avctx->pix_fmt;
310  frame->sample_aspect_ratio = avctx->sample_aspect_ratio;
311 
312  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
313  frame->pts = av_rescale_q(info->presentationTimeUs,
315  avctx->pkt_timebase);
316  } else {
317  frame->pts = info->presentationTimeUs;
318  }
319  frame->pkt_dts = AV_NOPTS_VALUE;
320  frame->color_range = avctx->color_range;
321  frame->color_primaries = avctx->color_primaries;
322  frame->color_trc = avctx->color_trc;
323  frame->colorspace = avctx->colorspace;
324 
325  buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
326  if (!buffer) {
327  ret = AVERROR(ENOMEM);
328  goto fail;
329  }
330 
331  atomic_init(&buffer->released, 0);
332 
333  frame->buf[0] = av_buffer_create(NULL,
334  0,
336  buffer,
338 
339  if (!frame->buf[0]) {
340  ret = AVERROR(ENOMEM);
341  goto fail;
342 
343  }
344 
345  buffer->ctx = s;
346  buffer->serial = atomic_load(&s->serial);
348 
349  buffer->index = index;
350  buffer->pts = info->presentationTimeUs;
351 
352  frame->data[3] = (uint8_t *)buffer;
353 
354  atomic_fetch_add(&s->hw_buffer_count, 1);
355  av_log(avctx, AV_LOG_DEBUG,
356  "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
357  buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));
358 
359  return 0;
360 fail:
361  av_freep(&buffer);
363  if (status < 0) {
364  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
366  }
367 
368  return ret;
369 }
370 
373  uint8_t *data,
374  size_t size,
375  ssize_t index,
377  AVFrame *frame)
378 {
379  int ret = 0;
380  int status = 0;
381  const int sample_size = av_get_bytes_per_sample(avctx->sample_fmt);
382  if (!sample_size) {
383  av_log(avctx, AV_LOG_ERROR, "Could not get bytes per sample\n");
384  ret = AVERROR(ENOSYS);
385  goto done;
386  }
387 
388  if (info->size % (sample_size * avctx->ch_layout.nb_channels)) {
389  av_log(avctx, AV_LOG_ERROR, "input is not a multiple of channels * sample_size\n");
390  ret = AVERROR(EINVAL);
391  goto done;
392  }
393 
394  frame->format = avctx->sample_fmt;
395  frame->sample_rate = avctx->sample_rate;
396  frame->nb_samples = info->size / (sample_size * avctx->ch_layout.nb_channels);
397 
398  ret = av_channel_layout_copy(&frame->ch_layout, &avctx->ch_layout);
399  if (ret < 0) {
400  av_log(avctx, AV_LOG_ERROR, "Could not copy channel layout\n");
401  goto done;
402  }
403 
404  /* MediaCodec buffers needs to be copied to our own refcounted buffers
405  * because the flush command invalidates all input and output buffers.
406  */
407  ret = ff_get_buffer(avctx, frame, 0);
408  if (ret < 0) {
409  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
410  goto done;
411  }
412 
413  /* Override frame->pts as ff_get_buffer will override its value based
414  * on the last avpacket received which is not in sync with the frame:
415  * * N avpackets can be pushed before 1 frame is actually returned
416  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
417  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
418  frame->pts = av_rescale_q(info->presentationTimeUs,
420  avctx->pkt_timebase);
421  } else {
422  frame->pts = info->presentationTimeUs;
423  }
424  frame->pkt_dts = AV_NOPTS_VALUE;
425  frame->flags |= AV_FRAME_FLAG_KEY;
426 
427  av_log(avctx, AV_LOG_TRACE,
428  "Frame: format=%d channels=%d sample_rate=%d nb_samples=%d",
429  avctx->sample_fmt, avctx->ch_layout.nb_channels, avctx->sample_rate, frame->nb_samples);
430 
431  memcpy(frame->data[0], data, info->size);
432 
433  ret = 0;
434 done:
436  if (status < 0) {
437  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
439  }
440 
441  return ret;
442 }
443 
446  uint8_t *data,
447  size_t size,
448  ssize_t index,
450  AVFrame *frame)
451 {
452  int ret = 0;
453  int status = 0;
454 
455  frame->width = avctx->width;
456  frame->height = avctx->height;
457  frame->format = avctx->pix_fmt;
458 
459  /* MediaCodec buffers needs to be copied to our own refcounted buffers
460  * because the flush command invalidates all input and output buffers.
461  */
462  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
463  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
464  goto done;
465  }
466 
467  /* Override frame->pkt_pts as ff_get_buffer will override its value based
468  * on the last avpacket received which is not in sync with the frame:
469  * * N avpackets can be pushed before 1 frame is actually returned
470  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
471  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
472  frame->pts = av_rescale_q(info->presentationTimeUs,
474  avctx->pkt_timebase);
475  } else {
476  frame->pts = info->presentationTimeUs;
477  }
478  frame->pkt_dts = AV_NOPTS_VALUE;
479 
480  av_log(avctx, AV_LOG_TRACE,
481  "Frame: width=%d stride=%d height=%d slice-height=%d "
482  "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
483  "destination linesizes=%d,%d,%d\n" ,
484  avctx->width, s->stride, avctx->height, s->slice_height,
485  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
486  frame->linesize[0], frame->linesize[1], frame->linesize[2]);
487 
488  switch (s->color_format) {
491  break;
496  break;
500  break;
503  break;
504  default:
505  av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
506  s->color_format, s->color_format);
507  ret = AVERROR(EINVAL);
508  goto done;
509  }
510 
511  ret = 0;
512 done:
514  if (status < 0) {
515  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
517  }
518 
519  return ret;
520 }
521 
524  uint8_t *data,
525  size_t size,
526  ssize_t index,
528  AVFrame *frame)
529 {
530  if (avctx->codec_type == AVMEDIA_TYPE_AUDIO)
532  else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
534  else
535  av_assert0(0);
536 }
537 
/**
 * Fetch an int32 entry identified by `key` from the decoder's output
 * MediaFormat (s->format) and store it into `name`.
 *
 * If the key is absent and `mandatory` is non-zero, logs an error
 * (including the caller's `format` string dump), sets `ret` to
 * AVERROR_EXTERNAL and jumps to the caller's `fail` label; if the key
 * is absent and optional, `name` is zeroed instead.
 *
 * NOTE: relies on `avctx`, `s`, `format`, `ret` and a `fail:` label all
 * being in scope at the expansion site.
 *
 * Fix: dropped the stray trailing backslash after `} while (0)`, which
 * spliced the following source line into the macro definition and would
 * silently swallow any non-blank line placed after it.
 */
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do {                              \
    int32_t value = 0;                                                                 \
    if (ff_AMediaFormat_getInt32(s->format, key, &value)) {                            \
        (name) = value;                                                                \
    } else if (mandatory) {                                                            \
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
        ret = AVERROR_EXTERNAL;                                                        \
        goto fail;                                                                     \
    } else {                                                                           \
        (name) = 0;                                                                    \
    }                                                                                  \
} while (0)
550 
552 {
553  int ret = 0;
554  int width = 0;
555  int height = 0;
556  int color_range = 0;
557  int color_standard = 0;
558  int color_transfer = 0;
559  char *format = NULL;
560 
561  if (!s->format) {
562  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
563  return AVERROR(EINVAL);
564  }
565 
566  format = ff_AMediaFormat_toString(s->format);
567  if (!format) {
568  return AVERROR_EXTERNAL;
569  }
570  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
571 
572  /* Mandatory fields */
573  AMEDIAFORMAT_GET_INT32(s->width, "width", 1);
574  AMEDIAFORMAT_GET_INT32(s->height, "height", 1);
575 
576  AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
577  s->stride = s->stride > 0 ? s->stride : s->width;
578 
579  AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);
580 
581  if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
582  s->slice_height = FFALIGN(s->height, 16);
583  } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
584  s->slice_height = avctx->height;
585  s->stride = avctx->width;
586  } else if (strstr(s->codec_name, "OMX.MTK.VIDEO.DECODER.MPEG2")) {
587  s->slice_height = s->height;
588  } else if (s->slice_height == 0) {
589  s->slice_height = s->height;
590  }
591 
592  AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
593  avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
594  if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
595  av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
596  ret = AVERROR(EINVAL);
597  goto fail;
598  }
599 
600  /* Optional fields */
601  AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0);
602  AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
603  AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0);
604  AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0);
605 
606  // Try "crop" for NDK
607  // MediaTek SOC return some default value like Rect(0, 0, 318, 238)
608  if (!(s->crop_right && s->crop_bottom) && s->use_ndk_codec && !strstr(s->codec_name, ".mtk."))
609  ff_AMediaFormat_getRect(s->format, "crop", &s->crop_left, &s->crop_top, &s->crop_right, &s->crop_bottom);
610 
611  if (s->crop_right && s->crop_bottom) {
612  width = s->crop_right + 1 - s->crop_left;
613  height = s->crop_bottom + 1 - s->crop_top;
614  } else {
615  /* TODO: NDK MediaFormat should try getRect() first.
616  * Try crop-width/crop-height, it works on NVIDIA Shield.
617  */
618  AMEDIAFORMAT_GET_INT32(width, "crop-width", 0);
619  AMEDIAFORMAT_GET_INT32(height, "crop-height", 0);
620  }
621  if (!width || !height) {
622  width = s->width;
623  height = s->height;
624  }
625 
626  AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0);
627  AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);
628 
629  if (s->display_width && s->display_height) {
630  AVRational sar = av_div_q(
631  (AVRational){ s->display_width, s->display_height },
632  (AVRational){ width, height });
633  ff_set_sar(avctx, sar);
634  }
635 
636  AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
637  if (color_range)
639 
640  AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
641  if (color_standard) {
644  }
645 
646  AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
647  if (color_transfer)
649 
650  av_log(avctx, AV_LOG_INFO,
651  "Output crop parameters top=%d bottom=%d left=%d right=%d, "
652  "resulting dimensions width=%d height=%d\n",
653  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
654  width, height);
655 
656  av_freep(&format);
657  return ff_set_dimensions(avctx, width, height);
658 fail:
659  av_freep(&format);
660  return ret;
661 }
662 
664 {
665  int ret = 0;
666  int sample_rate = 0;
667  int channel_count = 0;
668  int channel_mask = 0;
669  int pcm_encoding = 0;
670  char *format = NULL;
671 
672  if (!s->format) {
673  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
674  return AVERROR(EINVAL);
675  }
676 
677  format = ff_AMediaFormat_toString(s->format);
678  if (!format) {
679  return AVERROR_EXTERNAL;
680  }
681  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
682 
683  /* Mandatory fields */
684  AMEDIAFORMAT_GET_INT32(channel_count, "channel-count", 1);
685  AMEDIAFORMAT_GET_INT32(sample_rate, "sample-rate", 1);
686 
687  AMEDIAFORMAT_GET_INT32(pcm_encoding, "pcm-encoding", 0);
688  if (pcm_encoding)
689  avctx->sample_fmt = mcdec_map_pcm_format(avctx, s, pcm_encoding);
690  else
691  avctx->sample_fmt = AV_SAMPLE_FMT_S16;
692 
693  avctx->sample_rate = sample_rate;
694 
695  AMEDIAFORMAT_GET_INT32(channel_mask, "channel-mask", 0);
696  if (channel_mask)
697  av_channel_layout_from_mask(&avctx->ch_layout, mcdec_map_channel_mask(avctx, channel_mask));
698  else
699  av_channel_layout_default(&avctx->ch_layout, channel_count);
700 
701  av_log(avctx, AV_LOG_INFO,
702  "Output parameters channel-count=%d channel-layout=%x sample-rate=%d\n",
703  channel_count, channel_mask, sample_rate);
704 
705 fail:
706  av_freep(&format);
707  return ret;
708 }
709 
711 {
712  if (avctx->codec_type == AVMEDIA_TYPE_AUDIO)
713  return mediacodec_dec_parse_audio_format(avctx, s);
714  else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
715  return mediacodec_dec_parse_video_format(avctx, s);
716  else
717  av_assert0(0);
718 }
719 
721 {
722  FFAMediaCodec *codec = s->codec;
723  int status;
724 
725  s->output_buffer_count = 0;
726 
727  s->draining = 0;
728  s->flushing = 0;
729  s->eos = 0;
730  atomic_fetch_add(&s->serial, 1);
731  atomic_init(&s->hw_buffer_count, 0);
732  s->current_input_buffer = -1;
733 
734  status = ff_AMediaCodec_flush(codec);
735  if (status < 0) {
736  av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
737  return AVERROR_EXTERNAL;
738  }
739 
740  return 0;
741 }
742 
744  const char *mime, FFAMediaFormat *format)
745 {
746  int profile;
747 
748  enum AVPixelFormat pix_fmt;
749  static const enum AVPixelFormat pix_fmts[] = {
752  };
753 
754  pix_fmt = ff_get_format(avctx, pix_fmts);
756  AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
757 
758  if (avctx->hw_device_ctx) {
759  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
760  if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
761  if (device_ctx->hwctx) {
762  AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
763  s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, mediacodec_ctx->native_window, avctx);
764  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
765  }
766  }
767  }
768 
769  if (!s->surface && user_ctx && user_ctx->surface) {
770  s->surface = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx);
771  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
772  }
773  }
774 
776  if (profile < 0) {
777  av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
778  }
779 
780  s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
781  if (!s->codec_name) {
782  // getCodecNameByType() can fail due to missing JVM, while NDK
783  // mediacodec can be used without JVM.
784  if (!s->use_ndk_codec) {
785  return AVERROR_EXTERNAL;
786  }
787  av_log(avctx, AV_LOG_INFO, "Failed to getCodecNameByType\n");
788  } else {
789  av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
790  }
791 
792  if (s->codec_name)
793  s->codec = ff_AMediaCodec_createCodecByName(s->codec_name, s->use_ndk_codec);
794  else {
795  s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
796  if (s->codec) {
797  s->codec_name = ff_AMediaCodec_getName(s->codec);
798  if (!s->codec_name)
799  s->codec_name = av_strdup(mime);
800  }
801  }
802  if (!s->codec) {
803  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
804  return AVERROR_EXTERNAL;
805  }
806 
807  return 0;
808 }
809 
811  const char *mime, FFAMediaFormat *format)
812 {
813  s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
814  if (!s->codec) {
815  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for mime %s\n", mime);
816  return AVERROR_EXTERNAL;
817  }
818 
819  s->codec_name = ff_AMediaCodec_getName(s->codec);
820  if (!s->codec_name) {
821  s->codec_name = av_strdup(mime);
822  if (!s->codec_name)
823  return AVERROR(ENOMEM);
824  }
825 
826  return 0;
827 }
828 
830  const char *mime, FFAMediaFormat *format)
831 {
832  int ret;
833  int status;
834 
835  s->avctx = avctx;
836  atomic_init(&s->refcount, 1);
837  atomic_init(&s->hw_buffer_count, 0);
838  atomic_init(&s->serial, 1);
839  s->current_input_buffer = -1;
840 
841  if (avctx->codec_type == AVMEDIA_TYPE_AUDIO)
842  ret = mediacodec_dec_get_audio_codec(avctx, s, mime, format);
843  else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
844  ret = mediacodec_dec_get_video_codec(avctx, s, mime, format);
845  else
846  av_assert0(0);
847  if (ret < 0)
848  goto fail;
849 
850  status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
851  if (status < 0) {
853  av_log(avctx, AV_LOG_ERROR,
854  "Failed to configure codec %s (status = %d) with format %s\n",
855  s->codec_name, status, desc);
856  av_freep(&desc);
857 
859  goto fail;
860  }
861 
862  status = ff_AMediaCodec_start(s->codec);
863  if (status < 0) {
865  av_log(avctx, AV_LOG_ERROR,
866  "Failed to start codec %s (status = %d) with format %s\n",
867  s->codec_name, status, desc);
868  av_freep(&desc);
870  goto fail;
871  }
872 
873  if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) {
874  s->format = ff_AMediaCodec_getOutputFormat(s->codec);
875  if (s->format) {
876  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
877  av_log(avctx, AV_LOG_ERROR,
878  "Failed to configure context\n");
879  goto fail;
880  }
881  }
882  }
883 
884  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);
885 
886  return 0;
887 
888 fail:
889  av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
890  ff_mediacodec_dec_close(avctx, s);
891  return ret;
892 }
893 
895  AVPacket *pkt, bool wait)
896 {
897  int offset = 0;
898  int need_draining = 0;
899  uint8_t *data;
900  size_t size;
901  FFAMediaCodec *codec = s->codec;
902  int status;
903  int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
904  int64_t pts;
905 
906  if (s->flushing) {
907  av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
908  "until all output buffers have been released\n");
909  return AVERROR_EXTERNAL;
910  }
911 
912  if (pkt->size == 0) {
913  need_draining = 1;
914  }
915 
916  if (s->draining && s->eos) {
917  return AVERROR_EOF;
918  }
919 
920  while (offset < pkt->size || (need_draining && !s->draining)) {
921  ssize_t index = s->current_input_buffer;
922  if (index < 0) {
923  index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
925  av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
926  break;
927  }
928 
929  if (index < 0) {
930  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
931  return AVERROR_EXTERNAL;
932  }
933  }
934  s->current_input_buffer = -1;
935 
937  if (!data) {
938  av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
939  return AVERROR_EXTERNAL;
940  }
941 
942  pts = pkt->pts;
943  if (pts == AV_NOPTS_VALUE) {
944  av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
945  pts = 0;
946  }
947  if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
949  }
950 
951  if (need_draining) {
953 
954  av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");
955 
957  if (status < 0) {
958  av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
959  return AVERROR_EXTERNAL;
960  }
961 
962  av_log(avctx, AV_LOG_TRACE,
963  "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);
964 
965  s->draining = 1;
966  return 0;
967  }
968 
969  size = FFMIN(pkt->size - offset, size);
970  memcpy(data, pkt->data + offset, size);
971  offset += size;
972 
974  if (status < 0) {
975  av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
976  return AVERROR_EXTERNAL;
977  }
978 
979  av_log(avctx, AV_LOG_TRACE,
980  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
981  }
982 
983  if (offset == 0)
984  return AVERROR(EAGAIN);
985  return offset;
986 }
987 
989  AVFrame *frame, bool wait)
990 {
991  int ret;
992  uint8_t *data;
993  ssize_t index;
994  size_t size;
995  FFAMediaCodec *codec = s->codec;
997  int status;
998  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
999 
1000  if (s->draining && s->eos) {
1001  return AVERROR_EOF;
1002  }
1003 
1004  if (s->draining) {
1005  /* If the codec is flushing or need to be flushed, block for a fair
1006  * amount of time to ensure we got a frame */
1007  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
1008  } else if (s->output_buffer_count == 0 || !wait) {
1009  /* If the codec hasn't produced any frames, do not block so we
1010  * can push data to it as fast as possible, and get the first
1011  * frame */
1012  output_dequeue_timeout_us = 0;
1013  }
1014 
1015  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
1016  if (index >= 0) {
1017  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
1018  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
1019  " flags=%" PRIu32 "\n", index, info.offset, info.size,
1020  info.presentationTimeUs, info.flags);
1021 
1022  if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
1023  s->eos = 1;
1024  }
1025 
1026  if (info.size) {
1027  if (s->surface) {
1028  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
1029  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
1030  return ret;
1031  }
1032  } else {
1034  if (!data) {
1035  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
1036  return AVERROR_EXTERNAL;
1037  }
1038 
1039  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
1040  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
1041  return ret;
1042  }
1043  }
1044 
1045  s->output_buffer_count++;
1046  return 0;
1047  } else {
1049  if (status < 0) {
1050  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
1051  }
1052  }
1053 
1054  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
1055  char *format = NULL;
1056 
1057  if (s->format) {
1058  status = ff_AMediaFormat_delete(s->format);
1059  if (status < 0) {
1060  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
1061  }
1062  }
1063 
1064  s->format = ff_AMediaCodec_getOutputFormat(codec);
1065  if (!s->format) {
1066  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
1067  return AVERROR_EXTERNAL;
1068  }
1069 
1070  format = ff_AMediaFormat_toString(s->format);
1071  if (!format) {
1072  return AVERROR_EXTERNAL;
1073  }
1074  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
1075  av_freep(&format);
1076 
1077  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
1078  return ret;
1079  }
1080 
1081  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
1083  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
1084  if (s->draining) {
1085  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
1086  "while draining remaining frames, output will probably lack frames\n",
1087  output_dequeue_timeout_us / 1000);
1088  } else {
1089  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
1090  }
1091  } else {
1092  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
1093  return AVERROR_EXTERNAL;
1094  }
1095 
1096  if (s->draining && s->eos)
1097  return AVERROR_EOF;
1098  return AVERROR(EAGAIN);
1099 }
1100 
1101 /*
1102 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
1103 * the codec (because the user retains frames). The codec stays in the
1104 * flushing state.
1105 *
1106 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
1107 * performed on the codec. The codec leaves the flushing state and can
1108 * process again packets.
1109 *
1110 * ff_mediacodec_dec_flush returns a negative value if an error has
1111 * occurred.
1112 */
1114 {
1115  if (!s->surface || !s->delay_flush || atomic_load(&s->refcount) == 1) {
1116  int ret;
1117 
1118  /* No frames (holding a reference to the codec) are retained by the
1119  * user, thus we can flush the codec and returns accordingly */
1120  if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
1121  return ret;
1122  }
1123 
1124  return 1;
1125  }
1126 
1127  s->flushing = 1;
1128  return 0;
1129 }
1130 
1132 {
1133  if (!s)
1134  return 0;
1135 
1136  if (s->codec) {
1137  if (atomic_load(&s->hw_buffer_count) == 0) {
1138  ff_AMediaCodec_stop(s->codec);
1139  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p stopped\n", s->codec);
1140  } else {
1141  av_log(avctx, AV_LOG_DEBUG, "Not stopping MediaCodec (there are buffers pending)\n");
1142  }
1143  }
1144 
1146 
1147  return 0;
1148 }
1149 
1151 {
1152  return s->flushing;
1153 }
flags
const SwsFlags flags[]
Definition: swscale.c:71
ff_AMediaCodec_getInputBuffer
static uint8_t * ff_AMediaCodec_getInputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
Definition: mediacodec_wrapper.h:282
COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced
@ COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced
Definition: mediacodecdec_common.c:200
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
AVCodecContext::hwaccel_context
void * hwaccel_context
Legacy hardware accelerator context.
Definition: avcodec.h:1441
ff_AMediaCodecList_getCodecNameByType
char * ff_AMediaCodecList_getCodecNameByType(const char *mime, int profile, int encoder, void *log_ctx)
Definition: mediacodec_wrapper.c:470
ff_AMediaFormat_delete
static int ff_AMediaFormat_delete(FFAMediaFormat *format)
Definition: mediacodec_wrapper.h:92
MediaCodecDecContext
Definition: mediacodecdec_common.h:37
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
ff_AMediaCodec_delete
static int ff_AMediaCodec_delete(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:277
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
AVERROR
#define AVERROR(e)
Returns a negative error code from a POSIX error code, to return from library functions. Definition: error.h
COLOR_FormatAndroidOpaque
@ COLOR_FormatAndroidOpaque
Definition: mediacodecdec_common.c:195
mediacodec_dec_get_audio_codec
static int mediacodec_dec_get_audio_codec(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
Definition: mediacodecdec_common.c:810
mcdec_map_channel_mask
static uint64_t mcdec_map_channel_mask(AVCodecContext *avctx, int channel_mask)
Definition: mediacodecdec_common.c:178
AVCodecContext::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:667
ff_get_format
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1210
AV_CH_TOP_FRONT_CENTER
#define AV_CH_TOP_FRONT_CENTER
Definition: channel_layout.h:188
ff_AMediaCodec_start
static int ff_AMediaCodec_start(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:262
AVCodecContext::sample_rate
int sample_rate
samples per second
Definition: avcodec.h:1036
atomic_fetch_add
#define atomic_fetch_add(object, operand)
Definition: stdatomic.h:137
mediacodec_surface.h
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
ff_AMediaFormatColorStandard_to_AVColorSpace
enum AVColorSpace ff_AMediaFormatColorStandard_to_AVColorSpace(int color_standard)
Map MediaFormat color standard to AVColorSpace.
Definition: mediacodec_wrapper.c:2647
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
AV_TIME_BASE_Q
#define AV_TIME_BASE_Q
Internal time base represented as fractional value.
Definition: avutil.h:263
int64_t
long long int64_t
Definition: coverity.c:34
mediacodec_wrap_sw_video_buffer
static int mediacodec_wrap_sw_video_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:444
AVMediaCodecDeviceContext::surface
void * surface
android/view/Surface handle, to be filled by the user.
Definition: hwcontext_mediacodec.h:33
OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US
Definition: mediacodecdec_common.c:88
mask
int mask
Definition: mediacodecdec_common.c:154
ff_mediacodec_dec_close
int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:1131
AV_CH_TOP_FRONT_RIGHT
#define AV_CH_TOP_FRONT_RIGHT
Definition: channel_layout.h:189
ff_AMediaFormat_getRect
static int ff_AMediaFormat_getRect(FFAMediaFormat *format, const char *name, int32_t *left, int32_t *top, int32_t *right, int32_t *bottom)
Definition: mediacodec_wrapper.h:127
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:529
AVCodecContext::color_trc
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:660
ff_mediacodec_dec_receive
int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s, AVFrame *frame, bool wait)
Definition: mediacodecdec_common.c:988
CHANNEL_OUT_TOP_FRONT_LEFT
@ CHANNEL_OUT_TOP_FRONT_LEFT
Definition: mediacodecdec_common.c:144
AVPacket::data
uint8_t * data
Definition: packet.h:588
ff_AMediaCodec_infoOutputFormatChanged
static int ff_AMediaCodec_infoOutputFormatChanged(FFAMediaCodec *codec, ssize_t idx)
Definition: mediacodec_wrapper.h:332
OUTPUT_DEQUEUE_TIMEOUT_US
#define OUTPUT_DEQUEUE_TIMEOUT_US
Definition: mediacodecdec_common.c:87
ff_AMediaCodec_infoOutputBuffersChanged
static int ff_AMediaCodec_infoOutputBuffersChanged(FFAMediaCodec *codec, ssize_t idx)
Definition: mediacodec_wrapper.h:327
data
const char data[16]
Definition: mxf.c:149
AV_HWDEVICE_TYPE_MEDIACODEC
@ AV_HWDEVICE_TYPE_MEDIACODEC
Definition: hwcontext.h:38
ff_AMediaCodec_queueInputBuffer
static int ff_AMediaCodec_queueInputBuffer(FFAMediaCodec *codec, size_t idx, off_t offset, size_t size, uint64_t time, uint32_t flags)
Definition: mediacodec_wrapper.h:297
ff_mediacodec_dec_is_flushing
int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:1150
AV_CH_TOP_FRONT_LEFT
#define AV_CH_TOP_FRONT_LEFT
Definition: channel_layout.h:187
atomic_fetch_sub
#define atomic_fetch_sub(object, operand)
Definition: stdatomic.h:140
CHANNEL_OUT_TOP_CENTER
@ CHANNEL_OUT_TOP_CENTER
Definition: mediacodecdec_common.c:143
AVChannelLayout::nb_channels
int nb_channels
Number of channels in this layout.
Definition: channel_layout.h:329
ff_set_dimensions
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:91
CHANNEL_OUT_FRONT_RIGHT_OF_CENTER
@ CHANNEL_OUT_FRONT_RIGHT_OF_CENTER
Definition: mediacodecdec_common.c:139
hwcontext_mediacodec.h
CHANNEL_OUT_BACK_RIGHT
@ CHANNEL_OUT_BACK_RIGHT
Definition: mediacodecdec_common.c:137
COLOR_QCOM_FormatYUV420SemiPlanar
@ COLOR_QCOM_FormatYUV420SemiPlanar
Definition: mediacodecdec_common.c:196
AV_CH_TOP_BACK_LEFT
#define AV_CH_TOP_BACK_LEFT
Definition: channel_layout.h:190
ff_AMediaCodec_configure
static int ff_AMediaCodec_configure(FFAMediaCodec *codec, const FFAMediaFormat *format, FFANativeWindow *surface, void *crypto, uint32_t flags)
Definition: mediacodec_wrapper.h:254
COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
@ COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
Definition: mediacodecdec_common.c:198
COLOR_TI_FormatYUV420PackedSemiPlanar
@ COLOR_TI_FormatYUV420PackedSemiPlanar
Definition: mediacodecdec_common.c:199
mediacodec_wrap_sw_audio_buffer
static int mediacodec_wrap_sw_audio_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:371
ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodec_sw_buffer.c:272
AV_CH_TOP_BACK_CENTER
#define AV_CH_TOP_BACK_CENTER
Definition: channel_layout.h:191
AVCodecContext::ch_layout
AVChannelLayout ch_layout
Audio channel layout.
Definition: avcodec.h:1051
fail
#define fail()
Definition: checkasm.h:221
AV_CH_BACK_LEFT
#define AV_CH_BACK_LEFT
Definition: channel_layout.h:179
sample_formats
static const struct @200 sample_formats[]
ff_mediacodec_dec_flush
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:1113
pts
static int64_t pts
Definition: transcode_aac.c:644
AVRational::num
int num
Numerator.
Definition: rational.h:59
mediacodecdec_common.h
channel_masks
static const struct @202 channel_masks[]
COLOR_FormatYCbYCr
@ COLOR_FormatYCbYCr
Definition: mediacodecdec_common.c:194
INPUT_DEQUEUE_TIMEOUT_US
#define INPUT_DEQUEUE_TIMEOUT_US
OMX.k3.video.decoder.avc, OMX.NVIDIA.
Definition: mediacodecdec_common.c:86
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
avassert.h
AVCodecContext::color_primaries
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:653
AV_LOG_TRACE
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:236
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
ENCODING_PCM_8BIT
@ ENCODING_PCM_8BIT
Definition: mediacodecdec_common.c:92
AV_FRAME_FLAG_KEY
#define AV_FRAME_FLAG_KEY
A flag to mark frames that are keyframes.
Definition: frame.h:642
AV_CH_LOW_FREQUENCY
#define AV_CH_LOW_FREQUENCY
Definition: channel_layout.h:178
mcdec_map_color_format
static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx, MediaCodecDecContext *s, int color_format)
Definition: mediacodecdec_common.c:220
ff_AMediaCodec_getName
static char * ff_AMediaCodec_getName(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:245
ff_AMediaCodec_getBufferFlagEndOfStream
static int ff_AMediaCodec_getBufferFlagEndOfStream(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:342
s
#define s(width, name)
Definition: cbs_vp9.c:198
CHANNEL_OUT_LOW_FREQUENCY
@ CHANNEL_OUT_LOW_FREQUENCY
Definition: mediacodecdec_common.c:135
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
CHANNEL_OUT_FRONT_RIGHT
@ CHANNEL_OUT_FRONT_RIGHT
Definition: mediacodecdec_common.c:133
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:201
av_channel_layout_from_mask
int av_channel_layout_from_mask(AVChannelLayout *channel_layout, uint64_t mask)
Initialize a native channel layout from a bitmask indicating which channels are present.
Definition: channel_layout.c:252
info
MIPS optimizations info
Definition: mips.txt:2
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:42
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
decode.h
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodec_sw_buffer.c:181
color_range
color_range
Definition: vf_selectivecolor.c:43
av_mallocz
#define av_mallocz(s)
Definition: tableprint_vlc.h:31
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
AV_PIX_FMT_MEDIACODEC
@ AV_PIX_FMT_MEDIACODEC
hardware decoding through MediaCodec
Definition: pixfmt.h:316
CHANNEL_OUT_SIDE_LEFT
@ CHANNEL_OUT_SIDE_LEFT
Definition: mediacodecdec_common.c:141
CHANNEL_OUT_SIDE_RIGHT
@ CHANNEL_OUT_SIDE_RIGHT
Definition: mediacodecdec_common.c:142
ff_AMediaCodec_getOutputFormat
static FFAMediaFormat * ff_AMediaCodec_getOutputFormat(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:307
ff_AMediaCodec_createCodecByName
FFAMediaCodec * ff_AMediaCodec_createCodecByName(const char *name, int ndk)
Definition: mediacodec_wrapper.c:2538
AV_CH_TOP_CENTER
#define AV_CH_TOP_CENTER
Definition: channel_layout.h:186
NULL
#define NULL
Definition: coverity.c:32
ff_AMediaCodec_flush
static int ff_AMediaCodec_flush(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:272
FFAMediaCodecBufferInfo
Definition: mediacodec_wrapper.h:172
format
New swscale design to change SwsGraph is what coordinates multiple passes These can include cascaded scaling error diffusion and so on Or we could have separate passes for the vertical and horizontal scaling In between each SwsPass lies a fully allocated image buffer Graph passes may have different levels of e g we can have a single threaded error diffusion pass following a multi threaded scaling pass SwsGraph is internally recreated whenever the image format
Definition: swscale-v2.txt:14
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:677
mcdec_map_pcm_format
static enum AVSampleFormat mcdec_map_pcm_format(AVCodecContext *avctx, MediaCodecDecContext *s, int pcm_format)
Definition: mediacodecdec_common.c:112
AVMediaCodecContext
This structure holds a reference to a android/view/Surface object that will be used as output by the ...
Definition: mediacodec.h:33
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
CHANNEL_OUT_TOP_FRONT_RIGHT
@ CHANNEL_OUT_TOP_FRONT_RIGHT
Definition: mediacodecdec_common.c:146
layout
uint64_t layout
Definition: mediacodecdec_common.c:155
CHANNEL_OUT_TOP_BACK_RIGHT
@ CHANNEL_OUT_TOP_BACK_RIGHT
Definition: mediacodecdec_common.c:149
ff_set_sar
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:106
ff_AMediaCodec_stop
static int ff_AMediaCodec_stop(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:267
CHANNEL_OUT_FRONT_CENTER
@ CHANNEL_OUT_FRONT_CENTER
Definition: mediacodecdec_common.c:134
mediacodec_sw_buffer.h
ff_mediacodec_surface_unref
int ff_mediacodec_surface_unref(FFANativeWindow *window, void *log_ctx)
Definition: mediacodec_surface.c:59
time.h
ff_mediacodec_dec_ref
static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:247
AV_CH_FRONT_CENTER
#define AV_CH_FRONT_CENTER
Definition: channel_layout.h:177
AV_CH_FRONT_LEFT_OF_CENTER
#define AV_CH_FRONT_LEFT_OF_CENTER
Definition: channel_layout.h:181
ENCODING_PCM_FLOAT
@ ENCODING_PCM_FLOAT
Definition: mediacodecdec_common.c:93
index
int index
Definition: gxfenc.c:90
AVMediaCodecDeviceContext
MediaCodec details.
Definition: hwcontext_mediacodec.h:27
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
ff_AMediaFormat_toString
static char * ff_AMediaFormat_toString(FFAMediaFormat *format)
Definition: mediacodec_wrapper.h:97
AMEDIAFORMAT_GET_INT32
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory)
Definition: mediacodecdec_common.c:538
ENCODING_PCM_16BIT
@ ENCODING_PCM_16BIT
Definition: mediacodecdec_common.c:91
ff_mediacodec_sw_buffer_copy_yuv420_semi_planar
void ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodec_sw_buffer.c:131
mediacodec_dec_parse_format
static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:710
ff_mediacodec_sw_buffer_copy_yuv420_planar
void ff_mediacodec_sw_buffer_copy_yuv420_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
The code handling the various YUV color formats is taken from the GStreamer project.
Definition: mediacodec_sw_buffer.c:76
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1747
ff_AMediaFormatColorStandard_to_AVColorPrimaries
enum AVColorPrimaries ff_AMediaFormatColorStandard_to_AVColorPrimaries(int color_standard)
Map MediaFormat color standard to AVColorPrimaries.
Definition: mediacodec_wrapper.c:2665
AVPacket::size
int size
Definition: packet.h:589
pix_fmts
static enum AVPixelFormat pix_fmts[4][4]
Definition: lcevc_parser.c:75
height
#define height
Definition: dsp.h:89
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
COLOR_FormatYUV420Planar
@ COLOR_FormatYUV420Planar
Definition: mediacodecdec_common.c:192
AVCodecContext::sample_fmt
enum AVSampleFormat sample_fmt
audio sample format
Definition: avcodec.h:1043
AVCodecContext::pkt_timebase
AVRational pkt_timebase
Timebase in which pkt_dts/pts and AVPacket.dts/pts are expressed.
Definition: avcodec.h:550
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:56
mediacodec_dec_get_video_codec
static int mediacodec_dec_get_video_codec(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
Definition: mediacodecdec_common.c:743
size
int size
Definition: twinvq_data.h:10344
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:247
pix_fmt
enum AVPixelFormat pix_fmt
Definition: mediacodecdec_common.c:206
color_formats
static const struct @204 color_formats[]
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
offset
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
Definition: writing_filters.txt:86
ff_AMediaCodecProfile_getProfileFromAVCodecContext
int ff_AMediaCodecProfile_getProfileFromAVCodecContext(AVCodecContext *avctx)
The following API around MediaCodec and MediaFormat is based on the NDK one provided by Google since ...
Definition: mediacodec_wrapper.c:309
AV_CH_TOP_BACK_RIGHT
#define AV_CH_TOP_BACK_RIGHT
Definition: channel_layout.h:192
AV_CH_FRONT_RIGHT_OF_CENTER
#define AV_CH_FRONT_RIGHT_OF_CENTER
Definition: channel_layout.h:182
ff_AMediaCodec_createDecoderByType
FFAMediaCodec * ff_AMediaCodec_createDecoderByType(const char *mime_type, int ndk)
Definition: mediacodec_wrapper.c:2545
mediacodec_dec_flush_codec
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:720
mediacodec_buffer_release
static void mediacodec_buffer_release(void *opaque, uint8_t *data)
Definition: mediacodecdec_common.c:278
ENCODING_PCM_32BIT
@ ENCODING_PCM_32BIT
Definition: mediacodecdec_common.c:95
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:221
av_channel_layout_default
void av_channel_layout_default(AVChannelLayout *ch_layout, int nb_channels)
Get the default channel layout for a given number of channels.
Definition: channel_layout.c:839
mediacodec_wrap_sw_buffer
static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:522
ff_mediacodec_dec_unref
static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:252
ff_AMediaFormatColorTransfer_to_AVColorTransfer
enum AVColorTransferCharacteristic ff_AMediaFormatColorTransfer_to_AVColorTransfer(int color_transfer)
Map MediaFormat color transfer to AVColorTransferCharacteristic.
Definition: mediacodec_wrapper.c:2675
log.h
mediacodec_wrapper.h
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:581
av_get_bytes_per_sample
int av_get_bytes_per_sample(enum AVSampleFormat sample_fmt)
Return number of bytes per sample.
Definition: samplefmt.c:108
FFAMediaCodec
Definition: mediacodec_wrapper.h:197
AV_SAMPLE_FMT_U8
@ AV_SAMPLE_FMT_U8
unsigned 8 bits
Definition: samplefmt.h:57
CHANNEL_OUT_TOP_FRONT_CENTER
@ CHANNEL_OUT_TOP_FRONT_CENTER
Definition: mediacodecdec_common.c:145
common.h
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:55
mediacodec_dec_parse_audio_format
static int mediacodec_dec_parse_audio_format(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:663
AV_CH_BACK_CENTER
#define AV_CH_BACK_CENTER
Definition: channel_layout.h:183
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_CH_FRONT_LEFT
#define AV_CH_FRONT_LEFT
Definition: channel_layout.h:175
CHANNEL_OUT_TOP_BACK_CENTER
@ CHANNEL_OUT_TOP_BACK_CENTER
Definition: mediacodecdec_common.c:148
AV_SAMPLE_FMT_S16
@ AV_SAMPLE_FMT_S16
signed 16 bits
Definition: samplefmt.h:58
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:1487
AVMediaCodecContext::surface
void * surface
android/view/Surface object reference.
Definition: mediacodec.h:38
ff_mediacodec_surface_ref
FFANativeWindow * ff_mediacodec_surface_ref(void *surface, void *native_window, void *log_ctx)
Definition: mediacodec_surface.c:30
AV_CH_SIDE_RIGHT
#define AV_CH_SIDE_RIGHT
Definition: channel_layout.h:185
profile
int profile
Definition: mxfenc.c:2297
AVCodecContext::height
int height
Definition: avcodec.h:600
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:639
AVMediaCodecDeviceContext::native_window
void * native_window
Pointer to ANativeWindow.
Definition: hwcontext_mediacodec.h:45
ff_AMediaCodec_dequeueInputBuffer
static ssize_t ff_AMediaCodec_dequeueInputBuffer(FFAMediaCodec *codec, int64_t timeoutUs)
Definition: mediacodec_wrapper.h:292
avcodec.h
ff_AMediaFormatColorRange_to_AVColorRange
enum AVColorRange ff_AMediaFormatColorRange_to_AVColorRange(int color_range)
Map MediaFormat color range to AVColorRange.
Definition: mediacodec_wrapper.c:2630
ret
ret
Definition: filter_design.txt:187
CHANNEL_OUT_BACK_CENTER
@ CHANNEL_OUT_BACK_CENTER
Definition: mediacodecdec_common.c:140
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
CHANNEL_OUT_FRONT_LEFT
@ CHANNEL_OUT_FRONT_LEFT
Definition: mediacodecdec_common.c:132
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
COLOR_QCOM_FormatYUV420SemiPlanar32m
@ COLOR_QCOM_FormatYUV420SemiPlanar32m
Definition: mediacodecdec_common.c:197
AVCodecContext
main external API structure.
Definition: avcodec.h:439
status
ov_status_e status
Definition: dnn_backend_openvino.c:100
channel_layout.h
buffer
the frame and frame reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFrame structures Several references can point to the same frame buffer
Definition: filter_design.txt:49
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
COLOR_FormatYUV420SemiPlanar
@ COLOR_FormatYUV420SemiPlanar
Definition: mediacodecdec_common.c:193
CHANNEL_OUT_FRONT_LEFT_OF_CENTER
@ CHANNEL_OUT_FRONT_LEFT_OF_CENTER
Definition: mediacodecdec_common.c:138
pcm_format
int pcm_format
Definition: mediacodecdec_common.c:100
color_format
int color_format
Definition: mediacodecdec_common.c:205
AV_CH_FRONT_RIGHT
#define AV_CH_FRONT_RIGHT
Definition: channel_layout.h:176
av_channel_layout_copy
int av_channel_layout_copy(AVChannelLayout *dst, const AVChannelLayout *src)
Make a copy of a channel layout.
Definition: channel_layout.c:449
AVCodecContext::codec_type
enum AVMediaType codec_type
Definition: avcodec.h:447
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:272
desc
const char * desc
Definition: libsvtav1.c:82
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
mem.h
ff_AMediaCodec_getOutputBuffer
static uint8_t * ff_AMediaCodec_getOutputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
Definition: mediacodec_wrapper.h:287
sample_format
enum AVSampleFormat sample_format
Definition: mediacodecdec_common.c:101
mediacodec_wrap_hw_buffer
static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:296
ff_AMediaCodec_cleanOutputBuffers
static int ff_AMediaCodec_cleanOutputBuffers(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:357
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
AVPacket
This structure stores compressed data.
Definition: packet.h:565
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
ff_AMediaCodec_infoTryAgainLater
static int ff_AMediaCodec_infoTryAgainLater(FFAMediaCodec *codec, ssize_t idx)
Definition: mediacodec_wrapper.h:322
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:600
ff_mediacodec_dec_send
int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s, AVPacket *pkt, bool wait)
Definition: mediacodecdec_common.c:894
timestamp.h
ENCODING_PCM_24BIT_PACKED
@ ENCODING_PCM_24BIT_PACKED
Definition: mediacodecdec_common.c:94
ff_AMediaCodec_releaseOutputBuffer
static int ff_AMediaCodec_releaseOutputBuffer(FFAMediaCodec *codec, size_t idx, int render)
Definition: mediacodec_wrapper.h:312
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AV_CH_BACK_RIGHT
#define AV_CH_BACK_RIGHT
Definition: channel_layout.h:180
pkt
static AVPacket * pkt
Definition: demux_decode.c:55
atomic_init
#define atomic_init(obj, value)
Definition: stdatomic.h:33
width
#define width
Definition: dsp.h:89
FFAMediaFormat
Definition: mediacodec_wrapper.h:63
mediacodec_dec_parse_video_format
static int mediacodec_dec_parse_video_format(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:551
AV_SAMPLE_FMT_S32
@ AV_SAMPLE_FMT_S32
signed 32 bits
Definition: samplefmt.h:59
CHANNEL_OUT_TOP_BACK_LEFT
@ CHANNEL_OUT_TOP_BACK_LEFT
Definition: mediacodecdec_common.c:147
ff_AMediaCodec_dequeueOutputBuffer
static ssize_t ff_AMediaCodec_dequeueOutputBuffer(FFAMediaCodec *codec, FFAMediaCodecBufferInfo *info, int64_t timeoutUs)
Definition: mediacodec_wrapper.h:302
AV_SAMPLE_FMT_FLT
@ AV_SAMPLE_FMT_FLT
float
Definition: samplefmt.h:60
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:624
CHANNEL_OUT_BACK_LEFT
@ CHANNEL_OUT_BACK_LEFT
Definition: mediacodecdec_common.c:136
AV_CH_SIDE_LEFT
#define AV_CH_SIDE_LEFT
Definition: channel_layout.h:184
mediacodec.h
ff_mediacodec_dec_init
int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
Definition: mediacodecdec_common.c:829