FFmpeg
mediacodecdec_common.c
/*
 * Android MediaCodec decoder
 *
 * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include "libavutil/common.h"
#include "libavutil/hwcontext_mediacodec.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"
#include "libavutil/timestamp.h"

#include "avcodec.h"
#include "decode.h"

#include "mediacodec.h"
#include "mediacodec_surface.h"
#include "mediacodec_sw_buffer.h"
#include "mediacodec_wrapper.h"
#include "mediacodecdec_common.h"

/**
 * OMX.k3.video.decoder.avc, OMX.NVIDIA.*, OMX.SEC.avc.dec and OMX.google
 * codec workarounds used in various places are taken from the Gstreamer
 * project.
 *
 * Gstreamer references:
 * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
 *
 * Gstreamer copyright notice:
 *
 * Copyright (C) 2012, Collabora Ltd.
 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
 *
 * Copyright (C) 2012, Rafaël Carré <funman@videolanorg>
 *
 * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
 *
 * Copyright (C) 2014-2015, Collabora Ltd.
 * Author: Matthieu Bouron <matthieu.bouron@gcollabora.com>
 *
 * Copyright (C) 2015, Edward Hervey
 * Author: Edward Hervey <bilboed@gmail.com>
 *
 * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#define INPUT_DEQUEUE_TIMEOUT_US 8000
#define OUTPUT_DEQUEUE_TIMEOUT_US 8000
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000

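/*
 * The 8 ms dequeue timeouts bound how long ff_mediacodec_dec_send() and
 * ff_mediacodec_dec_receive() wait for a buffer during normal operation;
 * the 1 s timeout is only used while the codec is draining, where blocking
 * longer is acceptable in order to recover the remaining frames.
 */
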
enum {
    COLOR_RANGE_FULL    = 0x1,
    COLOR_RANGE_LIMITED = 0x2,
};

static enum AVColorRange mcdec_get_color_range(int color_range)
{
    switch (color_range) {
    case COLOR_RANGE_FULL:
        return AVCOL_RANGE_JPEG;
    case COLOR_RANGE_LIMITED:
        return AVCOL_RANGE_MPEG;
    default:
        return AVCOL_RANGE_UNSPECIFIED;
    }
}

enum {
    COLOR_STANDARD_BT709      = 0x1,
    COLOR_STANDARD_BT601_PAL  = 0x2,
    COLOR_STANDARD_BT601_NTSC = 0x4,
    COLOR_STANDARD_BT2020     = 0x6,
};

static enum AVColorSpace mcdec_get_color_space(int color_standard)
{
    switch (color_standard) {
    case COLOR_STANDARD_BT709:
        return AVCOL_SPC_BT709;
    case COLOR_STANDARD_BT601_PAL:
        return AVCOL_SPC_BT470BG;
    case COLOR_STANDARD_BT601_NTSC:
        return AVCOL_SPC_SMPTE170M;
    case COLOR_STANDARD_BT2020:
        return AVCOL_SPC_BT2020_NCL;
    default:
        return AVCOL_SPC_UNSPECIFIED;
    }
}

static enum AVColorPrimaries mcdec_get_color_pri(int color_standard)
{
    switch (color_standard) {
    case COLOR_STANDARD_BT709:
        return AVCOL_PRI_BT709;
    case COLOR_STANDARD_BT601_PAL:
        return AVCOL_PRI_BT470BG;
    case COLOR_STANDARD_BT601_NTSC:
        return AVCOL_PRI_SMPTE170M;
    case COLOR_STANDARD_BT2020:
        return AVCOL_PRI_BT2020;
    default:
        return AVCOL_PRI_UNSPECIFIED;
    }
}

enum {
    COLOR_TRANSFER_LINEAR    = 0x1,
    COLOR_TRANSFER_SDR_VIDEO = 0x3,
    COLOR_TRANSFER_ST2084    = 0x6,
    COLOR_TRANSFER_HLG       = 0x7,
};

static enum AVColorTransferCharacteristic mcdec_get_color_trc(int color_transfer)
{
    switch (color_transfer) {
    case COLOR_TRANSFER_LINEAR:
        return AVCOL_TRC_LINEAR;
    case COLOR_TRANSFER_SDR_VIDEO:
        return AVCOL_TRC_SMPTE170M;
    case COLOR_TRANSFER_ST2084:
        return AVCOL_TRC_SMPTEST2084;
    case COLOR_TRANSFER_HLG:
        return AVCOL_TRC_ARIB_STD_B67;
    default:
        return AVCOL_TRC_UNSPECIFIED;
    }
}

enum {
    COLOR_FormatYUV420Planar                              = 0x13,
    COLOR_FormatYUV420SemiPlanar                          = 0x15,
    COLOR_FormatYCbYCr                                    = 0x19,
    COLOR_FormatAndroidOpaque                             = 0x7F000789,
    COLOR_QCOM_FormatYUV420SemiPlanar                     = 0x7fa30c00,
    COLOR_QCOM_FormatYUV420SemiPlanar32m                  = 0x7fa30c04,
    COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03,
    COLOR_TI_FormatYUV420PackedSemiPlanar                 = 0x7f000100,
    COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced       = 0x7f000001,
};

static const struct {

    int color_format;
    enum AVPixelFormat pix_fmt;

} color_formats[] = {

    { COLOR_FormatYUV420Planar,                              AV_PIX_FMT_YUV420P },
    { COLOR_FormatYUV420SemiPlanar,                          AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420SemiPlanar,                     AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420SemiPlanar32m,                  AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12    },
    { COLOR_TI_FormatYUV420PackedSemiPlanar,                 AV_PIX_FMT_NV12    },
    { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced,       AV_PIX_FMT_NV12    },
    { 0 }
};

static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx,
                                                 MediaCodecDecContext *s,
                                                 int color_format)
{
    int i;
    enum AVPixelFormat ret = AV_PIX_FMT_NONE;

    if (s->surface) {
        return AV_PIX_FMT_MEDIACODEC;
    }

    if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
        s->color_format = color_format = COLOR_TI_FormatYUV420PackedSemiPlanar;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
        if (color_formats[i].color_format == color_format) {
            return color_formats[i].pix_fmt;
        }
    }

    av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
           color_format, color_format);

    return ret;
}
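
/*
 * When a Surface is attached the decoder never copies pixels back to the CPU,
 * so the mapping table above is only consulted for the software (ByteBuffer)
 * output path; unknown vendor-specific formats are rejected rather than
 * guessed at.
 */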

static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
{
    atomic_fetch_add(&s->refcount, 1);
}

static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
{
    if (!s)
        return;

    if (atomic_fetch_sub(&s->refcount, 1) == 1) {
        if (s->codec) {
            ff_AMediaCodec_delete(s->codec);
            s->codec = NULL;
        }

        if (s->format) {
            ff_AMediaFormat_delete(s->format);
            s->format = NULL;
        }

        if (s->surface) {
            ff_mediacodec_surface_unref(s->surface, NULL);
            s->surface = NULL;
        }

        av_freep(&s->codec_name);
        av_freep(&s);
    }
}

static void mediacodec_buffer_release(void *opaque, uint8_t *data)
{
    AVMediaCodecBuffer *buffer = opaque;
    MediaCodecDecContext *ctx = buffer->ctx;
    int released = atomic_load(&buffer->released);

    if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
        atomic_fetch_sub(&ctx->hw_buffer_count, 1);
        av_log(ctx->avctx, AV_LOG_DEBUG,
               "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
               buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
        ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
    }

    ff_mediacodec_dec_unref(ctx);
    av_freep(&buffer);
}

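/*
 * Each hardware frame handed to the user holds a reference on the decoder
 * context (buffer->ctx) and records the serial that was current when it was
 * produced; comparing that serial against ctx->serial on release lets the
 * callback skip releaseOutputBuffer() for buffers that a codec flush has
 * already invalidated.
 */
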
static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx,
                                     MediaCodecDecContext *s,
                                     ssize_t index,
                                     FFAMediaCodecBufferInfo *info,
                                     AVFrame *frame)
{
    int ret = 0;
    int status = 0;
    AVMediaCodecBuffer *buffer = NULL;

    frame->buf[0] = NULL;
    frame->width = avctx->width;
    frame->height = avctx->height;
    frame->format = avctx->pix_fmt;
    frame->sample_aspect_ratio = avctx->sample_aspect_ratio;

    if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
        frame->pts = av_rescale_q(info->presentationTimeUs,
                                  AV_TIME_BASE_Q,
                                  avctx->pkt_timebase);
    } else {
        frame->pts = info->presentationTimeUs;
    }
    frame->pkt_dts = AV_NOPTS_VALUE;
    frame->color_range = avctx->color_range;
    frame->color_primaries = avctx->color_primaries;
    frame->color_trc = avctx->color_trc;
    frame->colorspace = avctx->colorspace;

    buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
    if (!buffer) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    atomic_init(&buffer->released, 0);

    frame->buf[0] = av_buffer_create(NULL,
                                     0,
                                     mediacodec_buffer_release,
                                     buffer,
                                     AV_BUFFER_FLAG_READONLY);

    if (!frame->buf[0]) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    buffer->ctx = s;
    buffer->serial = atomic_load(&s->serial);
    ff_mediacodec_dec_ref(s);

    buffer->index = index;
    buffer->pts = info->presentationTimeUs;

    frame->data[3] = (uint8_t *)buffer;

    atomic_fetch_add(&s->hw_buffer_count, 1);
    av_log(avctx, AV_LOG_DEBUG,
            "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
            buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));

    return 0;
fail:
    av_freep(&buffer);
    status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
        ret = AVERROR_EXTERNAL;
    }

    return ret;
}

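/*
 * For AV_PIX_FMT_MEDIACODEC frames the pixel data stays inside the codec:
 * frame->data[3] carries the AVMediaCodecBuffer so that the user (or the
 * hwaccel render path) can later call av_mediacodec_release_buffer() to
 * render or discard the underlying MediaCodec output buffer.
 */
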
static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx,
                                     MediaCodecDecContext *s,
                                     uint8_t *data,
                                     size_t size,
                                     ssize_t index,
                                     FFAMediaCodecBufferInfo *info,
                                     AVFrame *frame)
{
    int ret = 0;
    int status = 0;

    frame->width = avctx->width;
    frame->height = avctx->height;
    frame->format = avctx->pix_fmt;

    /* MediaCodec buffers need to be copied to our own refcounted buffers
     * because the flush command invalidates all input and output buffers.
     */
    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
        goto done;
    }

    /* Override frame->pkt_pts as ff_get_buffer will override its value based
     * on the last avpacket received which is not in sync with the frame:
     * * N avpackets can be pushed before 1 frame is actually returned
     * * 0-sized avpackets are pushed to flush remaining frames at EOS */
    if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
        frame->pts = av_rescale_q(info->presentationTimeUs,
                                  AV_TIME_BASE_Q,
                                  avctx->pkt_timebase);
    } else {
        frame->pts = info->presentationTimeUs;
    }
    frame->pkt_dts = AV_NOPTS_VALUE;

    av_log(avctx, AV_LOG_TRACE,
            "Frame: width=%d stride=%d height=%d slice-height=%d "
            "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
            "destination linesizes=%d,%d,%d\n",
            avctx->width, s->stride, avctx->height, s->slice_height,
            s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
            frame->linesize[0], frame->linesize[1], frame->linesize[2]);

    switch (s->color_format) {
    case COLOR_FormatYUV420Planar:
        ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar32m:
        ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_TI_FormatYUV420PackedSemiPlanar:
    case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced:
        ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:
        ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data, size, info, frame);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
            s->color_format, s->color_format);
        ret = AVERROR(EINVAL);
        goto done;
    }

    ret = 0;
done:
    status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
        ret = AVERROR_EXTERNAL;
    }

    return ret;
}

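/*
 * The software path always performs one full copy per frame: the ByteBuffer
 * layout is described by stride/slice-height plus the crop rectangle parsed
 * from the output format, and each ff_mediacodec_sw_buffer_copy_* helper
 * undoes one specific vendor layout into the frame allocated by
 * ff_get_buffer() above.
 */
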
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do {                              \
    int32_t value = 0;                                                                 \
    if (ff_AMediaFormat_getInt32(s->format, key, &value)) {                            \
        (name) = value;                                                                \
    } else if (mandatory) {                                                            \
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
        ret = AVERROR_EXTERNAL;                                                        \
        goto fail;                                                                     \
    }                                                                                  \
} while (0)

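/*
 * The macro relies on the enclosing function providing 'avctx', 's', 'format',
 * 'ret' and a 'fail' label. For example, AMEDIAFORMAT_GET_INT32(s->width,
 * "width", 1) in mediacodec_dec_parse_format() below looks up the "width"
 * int32 key and stores it into s->width, or jumps to fail with
 * AVERROR_EXTERNAL if that mandatory key is absent.
 */
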
static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    int ret = 0;
    int width = 0;
    int height = 0;
    int color_range = 0;
    int color_standard = 0;
    int color_transfer = 0;
    char *format = NULL;

    if (!s->format) {
        av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
        return AVERROR(EINVAL);
    }

    format = ff_AMediaFormat_toString(s->format);
    if (!format) {
        return AVERROR_EXTERNAL;
    }
    av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);

    /* Mandatory fields */
    AMEDIAFORMAT_GET_INT32(s->width,  "width", 1);
    AMEDIAFORMAT_GET_INT32(s->height, "height", 1);

    AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
    s->stride = s->stride > 0 ? s->stride : s->width;

    AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);

    if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
        s->slice_height = FFALIGN(s->height, 16);
    } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
        s->slice_height = avctx->height;
        s->stride = avctx->width;
    } else if (s->slice_height == 0) {
        s->slice_height = s->height;
    }

    AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
    avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
        ret = AVERROR(EINVAL);
        goto fail;
    }

    /* Optional fields */
    AMEDIAFORMAT_GET_INT32(s->crop_top,    "crop-top",    0);
    AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
    AMEDIAFORMAT_GET_INT32(s->crop_left,   "crop-left",   0);
    AMEDIAFORMAT_GET_INT32(s->crop_right,  "crop-right",  0);

    if (s->crop_right && s->crop_bottom) {
        width = s->crop_right + 1 - s->crop_left;
        height = s->crop_bottom + 1 - s->crop_top;
    } else {
        /* TODO: NDK MediaFormat should try getRect() first.
         * Try crop-width/crop-height, it works on NVIDIA Shield.
         */
        AMEDIAFORMAT_GET_INT32(width,  "crop-width",  0);
        AMEDIAFORMAT_GET_INT32(height, "crop-height", 0);
    }
    if (!width || !height) {
        width = s->width;
        height = s->height;
    }

    AMEDIAFORMAT_GET_INT32(s->display_width,  "display-width",  0);
    AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);

    if (s->display_width && s->display_height) {
        AVRational sar = av_div_q(
            (AVRational){ s->display_width, s->display_height },
            (AVRational){ width, height });
        ff_set_sar(avctx, sar);
    }

    AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
    if (color_range)
        avctx->color_range = mcdec_get_color_range(color_range);

    AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
    if (color_standard) {
        avctx->colorspace = mcdec_get_color_space(color_standard);
        avctx->color_primaries = mcdec_get_color_pri(color_standard);
    }

    AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
    if (color_transfer)
        avctx->color_trc = mcdec_get_color_trc(color_transfer);

    av_log(avctx, AV_LOG_INFO,
        "Output crop parameters top=%d bottom=%d left=%d right=%d, "
        "resulting dimensions width=%d height=%d\n",
        s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
        width, height);

    av_freep(&format);
    return ff_set_dimensions(avctx, width, height);
fail:
    av_freep(&format);
    return ret;
}

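/*
 * The crop keys are inclusive pixel coordinates, hence the "+ 1" above: a
 * 1920x1088 coded surface advertising crop-left=0, crop-right=1919,
 * crop-top=0, crop-bottom=1079 yields a visible 1920x1080 frame, while
 * display-width/display-height (when present) only affect the sample aspect
 * ratio, not the cropped dimensions.
 */
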
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    FFAMediaCodec *codec = s->codec;
    int status;

    s->output_buffer_count = 0;

    s->draining = 0;
    s->flushing = 0;
    s->eos = 0;
    atomic_fetch_add(&s->serial, 1);
    atomic_init(&s->hw_buffer_count, 0);
    s->current_input_buffer = -1;

    status = ff_AMediaCodec_flush(codec);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
        return AVERROR_EXTERNAL;
    }

    return 0;
}

int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s,
                           const char *mime, FFAMediaFormat *format)
{
    int ret = 0;
    int status;
    int profile;

    enum AVPixelFormat pix_fmt;
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_MEDIACODEC,
        AV_PIX_FMT_NONE,
    };

    s->avctx = avctx;
    atomic_init(&s->refcount, 1);
    atomic_init(&s->hw_buffer_count, 0);
    atomic_init(&s->serial, 1);
    s->current_input_buffer = -1;

    pix_fmt = ff_get_format(avctx, pix_fmts);
    if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
        AVMediaCodecContext *user_ctx = avctx->hwaccel_context;

        if (avctx->hw_device_ctx) {
            AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
            if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
                if (device_ctx->hwctx) {
                    AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
                    s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, mediacodec_ctx->native_window, avctx);
                    av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
                }
            }
        }

        if (!s->surface && user_ctx && user_ctx->surface) {
            s->surface = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx);
            av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
        }
    }

    profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx);
    if (profile < 0) {
        av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
    }

    s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
    if (!s->codec_name) {
        // getCodecNameByType() can fail due to missing JVM, while NDK
        // mediacodec can be used without JVM.
        if (!s->use_ndk_codec) {
            ret = AVERROR_EXTERNAL;
            goto fail;
        }
        av_log(avctx, AV_LOG_INFO, "Failed to getCodecNameByType\n");
    } else {
        av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
    }

    if (s->codec_name)
        s->codec = ff_AMediaCodec_createCodecByName(s->codec_name, s->use_ndk_codec);
    else {
        s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
        if (s->codec) {
            s->codec_name = ff_AMediaCodec_getName(s->codec);
            if (!s->codec_name)
                s->codec_name = av_strdup(mime);
        }
    }
    if (!s->codec) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
    if (status < 0) {
        char *desc = ff_AMediaFormat_toString(format);
        av_log(avctx, AV_LOG_ERROR,
               "Failed to configure codec %s (status = %d) with format %s\n",
               s->codec_name, status, desc);
        av_freep(&desc);

        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    status = ff_AMediaCodec_start(s->codec);
    if (status < 0) {
        char *desc = ff_AMediaFormat_toString(format);
        av_log(avctx, AV_LOG_ERROR,
               "Failed to start codec %s (status = %d) with format %s\n",
               s->codec_name, status, desc);
        av_freep(&desc);
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    s->format = ff_AMediaCodec_getOutputFormat(s->codec);
    if (s->format) {
        if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
            av_log(avctx, AV_LOG_ERROR,
                "Failed to configure context\n");
            goto fail;
        }
    }

    av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);

    return 0;

fail:
    av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
    ff_mediacodec_dec_close(avctx, s);
    return ret;
}

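/*
 * The codec wrappers (mediacodecdec.c) typically drive this API as follows:
 * build an FFAMediaFormat describing the stream (mime type, width/height and
 * extradata such as csd-0/csd-1), call ff_mediacodec_dec_init(), then
 * repeatedly feed packets with ff_mediacodec_dec_send() and drain frames with
 * ff_mediacodec_dec_receive() until one of them returns AVERROR(EAGAIN),
 * AVERROR_EOF or a real error.
 */
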
int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s,
                           AVPacket *pkt, bool wait)
{
    int offset = 0;
    int need_draining = 0;
    uint8_t *data;
    size_t size;
    FFAMediaCodec *codec = s->codec;
    int status;
    int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
    int64_t pts;

    if (s->flushing) {
        av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
                                    "until all output buffers have been released\n");
        return AVERROR_EXTERNAL;
    }

    if (pkt->size == 0) {
        need_draining = 1;
    }

    if (s->draining && s->eos) {
        return AVERROR_EOF;
    }

    while (offset < pkt->size || (need_draining && !s->draining)) {
        ssize_t index = s->current_input_buffer;
        if (index < 0) {
            index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
            if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
                av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
                break;
            }

            if (index < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
                return AVERROR_EXTERNAL;
            }
        }
        s->current_input_buffer = -1;

        data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
        if (!data) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
            return AVERROR_EXTERNAL;
        }

        pts = pkt->pts;
        if (pts == AV_NOPTS_VALUE) {
            av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
            pts = 0;
        }
        if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
            pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
        }

        if (need_draining) {
            uint32_t flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);

            av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");

            status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
                return AVERROR_EXTERNAL;
            }

            av_log(avctx, AV_LOG_TRACE,
                   "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);

            s->draining = 1;
            return 0;
        }

        size = FFMIN(pkt->size - offset, size);
        memcpy(data, pkt->data + offset, size);
        offset += size;

        status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
        if (status < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
            return AVERROR_EXTERNAL;
        }

        av_log(avctx, AV_LOG_TRACE,
               "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
    }

    if (offset == 0)
        return AVERROR(EAGAIN);
    return offset;
}

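/*
 * ff_mediacodec_dec_send() returns the number of bytes actually queued, which
 * may be smaller than pkt->size when MediaCodec runs out of input buffers;
 * the caller is expected to retry with the remainder of the packet, or to
 * treat AVERROR(EAGAIN) as a request to drain some output first. An empty
 * packet switches the decoder into draining mode by queueing an EOS input
 * buffer.
 */
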
int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s,
                              AVFrame *frame, bool wait)
{
    int ret;
    uint8_t *data;
    ssize_t index;
    size_t size;
    FFAMediaCodec *codec = s->codec;
    FFAMediaCodecBufferInfo info = { 0 };
    int status;
    int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;

    if (s->draining && s->eos) {
        return AVERROR_EOF;
    }

    if (s->draining) {
        /* If the codec is flushing or needs to be flushed, block for a fair
         * amount of time to ensure we got a frame */
        output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
    } else if (s->output_buffer_count == 0 || !wait) {
        /* If the codec hasn't produced any frames, do not block so we
         * can push data to it as fast as possible, and get the first
         * frame */
        output_dequeue_timeout_us = 0;
    }

    index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
    if (index >= 0) {
        av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
                " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
                " flags=%" PRIu32 "\n", index, info.offset, info.size,
                info.presentationTimeUs, info.flags);

        if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
            s->eos = 1;
        }

        if (info.size) {
            if (s->surface) {
                if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
                    return ret;
                }
            } else {
                data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
                if (!data) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
                    return AVERROR_EXTERNAL;
                }

                if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
                    return ret;
                }
            }

            s->output_buffer_count++;
            return 0;
        } else {
            status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
            }
        }

    } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
        char *format = NULL;

        if (s->format) {
            status = ff_AMediaFormat_delete(s->format);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
            }
        }

        s->format = ff_AMediaCodec_getOutputFormat(codec);
        if (!s->format) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
            return AVERROR_EXTERNAL;
        }

        format = ff_AMediaFormat_toString(s->format);
        if (!format) {
            return AVERROR_EXTERNAL;
        }
        av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
        av_freep(&format);

        if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
            return ret;
        }

    } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
        ff_AMediaCodec_cleanOutputBuffers(codec);
    } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
        if (s->draining) {
            av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
                    "while draining remaining frames, output will probably lack frames\n",
                    output_dequeue_timeout_us / 1000);
        } else {
            av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
        }
    } else {
        av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
        return AVERROR_EXTERNAL;
    }

    return AVERROR(EAGAIN);
}

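/*
 * A single call to ff_mediacodec_dec_receive() therefore yields at most one
 * frame; format-changed and buffers-changed dequeue results are handled
 * internally and surface to the caller as AVERROR(EAGAIN), so callers simply
 * retry until a frame, AVERROR_EOF or a real error comes back.
 */
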
/*
 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
 * the codec (because the user retains frames). The codec stays in the
 * flushing state.
 *
 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
 * performed on the codec. The codec leaves the flushing state and can
 * process packets again.
 *
 * ff_mediacodec_dec_flush returns a negative value if an error has
 * occurred.
 */
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    if (!s->surface || !s->delay_flush || atomic_load(&s->refcount) == 1) {
        int ret;

        /* No frames (holding a reference to the codec) are retained by the
         * user, thus we can flush the codec and return accordingly */
        if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
            return ret;
        }

        return 1;
    }

    s->flushing = 1;
    return 0;
}

int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    ff_mediacodec_dec_unref(s);

    return 0;
}

int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    return s->flushing;
}