FFmpeg
mediacodecdec_common.c
Go to the documentation of this file.
1 /*
2  * Android MediaCodec decoder
3  *
4  * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <string.h>
24 #include <sys/types.h>
25 
26 #include "libavutil/common.h"
28 #include "libavutil/mem.h"
29 #include "libavutil/log.h"
30 #include "libavutil/pixfmt.h"
31 #include "libavutil/time.h"
32 #include "libavutil/timestamp.h"
33 
34 #include "avcodec.h"
35 #include "internal.h"
36 
37 #include "mediacodec.h"
38 #include "mediacodec_surface.h"
39 #include "mediacodec_sw_buffer.h"
40 #include "mediacodec_wrapper.h"
41 #include "mediacodecdec_common.h"
42 
43 /**
44  * OMX.k3.video.decoder.avc, OMX.NVIDIA.* OMX.SEC.avc.dec and OMX.google
45  * codec workarounds used in various place are taken from the Gstreamer
46  * project.
47  *
48  * Gstreamer references:
49  * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
50  *
51  * Gstreamer copyright notice:
52  *
53  * Copyright (C) 2012, Collabora Ltd.
54  * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
55  *
56  * Copyright (C) 2012, Rafaël Carré <funman@videolan.org>
57  *
58  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
59  *
60  * Copyright (C) 2014-2015, Collabora Ltd.
61  * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
62  *
63  * Copyright (C) 2015, Edward Hervey
64  * Author: Edward Hervey <bilboed@gmail.com>
65  *
66  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
67  *
68  * This library is free software; you can redistribute it and/or
69  * modify it under the terms of the GNU Lesser General Public
70  * License as published by the Free Software Foundation
71  * version 2.1 of the License.
72  *
73  * This library is distributed in the hope that it will be useful,
74  * but WITHOUT ANY WARRANTY; without even the implied warranty of
75  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
76  * Lesser General Public License for more details.
77  *
78  * You should have received a copy of the GNU Lesser General Public
79  * License along with this library; if not, write to the Free Software
80  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
81  *
82  */
83 
84 #define INPUT_DEQUEUE_TIMEOUT_US 8000
85 #define OUTPUT_DEQUEUE_TIMEOUT_US 8000
86 #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000
87 
88 enum {
91 };
92 
94 {
95  switch (color_range) {
96  case COLOR_RANGE_FULL:
97  return AVCOL_RANGE_JPEG;
99  return AVCOL_RANGE_MPEG;
100  default:
102  }
103 }
104 
105 enum {
110 };
111 
112 static enum AVColorSpace mcdec_get_color_space(int color_standard)
113 {
114  switch (color_standard) {
116  return AVCOL_SPC_BT709;
118  return AVCOL_SPC_BT470BG;
120  return AVCOL_SPC_SMPTE170M;
122  return AVCOL_SPC_BT2020_NCL;
123  default:
124  return AVCOL_SPC_UNSPECIFIED;
125  }
126 }
127 
128 static enum AVColorPrimaries mcdec_get_color_pri(int color_standard)
129 {
130  switch (color_standard) {
132  return AVCOL_PRI_BT709;
134  return AVCOL_PRI_BT470BG;
136  return AVCOL_PRI_SMPTE170M;
138  return AVCOL_PRI_BT2020;
139  default:
140  return AVCOL_PRI_UNSPECIFIED;
141  }
142 }
143 
144 enum {
149 };
150 
151 static enum AVColorTransferCharacteristic mcdec_get_color_trc(int color_transfer)
152 {
153  switch (color_transfer) {
155  return AVCOL_TRC_LINEAR;
157  return AVCOL_TRC_SMPTE170M;
159  return AVCOL_TRC_SMPTEST2084;
160  case COLOR_TRANSFER_HLG:
161  return AVCOL_TRC_ARIB_STD_B67;
162  default:
163  return AVCOL_TRC_UNSPECIFIED;
164  }
165 }
166 
167 enum {
177 };
178 
179 static const struct {
180 
183 
184 } color_formats[] = {
185 
188  { COLOR_QCOM_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
189  { COLOR_QCOM_FormatYUV420SemiPlanar32m, AV_PIX_FMT_NV12 },
191  { COLOR_TI_FormatYUV420PackedSemiPlanar, AV_PIX_FMT_NV12 },
193  { 0 }
194 };
195 
198  int color_format)
199 {
200  int i;
202 
203  if (s->surface) {
204  return AV_PIX_FMT_MEDIACODEC;
205  }
206 
207  if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
209  }
210 
211  for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
212  if (color_formats[i].color_format == color_format) {
213  return color_formats[i].pix_fmt;
214  }
215  }
216 
217  av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
218  color_format, color_format);
219 
220  return ret;
221 }
222 
224 {
225  atomic_fetch_add(&s->refcount, 1);
226 }
227 
229 {
230  if (!s)
231  return;
232 
233  if (atomic_fetch_sub(&s->refcount, 1) == 1) {
234  if (s->codec) {
236  s->codec = NULL;
237  }
238 
239  if (s->format) {
241  s->format = NULL;
242  }
243 
244  if (s->surface) {
246  s->surface = NULL;
247  }
248 
249  av_freep(&s->codec_name);
250  av_freep(&s);
251  }
252 }
253 
254 static void mediacodec_buffer_release(void *opaque, uint8_t *data)
255 {
256  AVMediaCodecBuffer *buffer = opaque;
257  MediaCodecDecContext *ctx = buffer->ctx;
258  int released = atomic_load(&buffer->released);
259 
260  if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
262  av_log(ctx->avctx, AV_LOG_DEBUG,
263  "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
264  buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
265  ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
266  }
267 
268  if (ctx->delay_flush)
270  av_freep(&buffer);
271 }
272 
275  ssize_t index,
277  AVFrame *frame)
278 {
279  int ret = 0;
280  int status = 0;
281  AVMediaCodecBuffer *buffer = NULL;
282 
283  frame->buf[0] = NULL;
284  frame->width = avctx->width;
285  frame->height = avctx->height;
286  frame->format = avctx->pix_fmt;
288 
289  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
290  frame->pts = av_rescale_q(info->presentationTimeUs,
292  avctx->pkt_timebase);
293  } else {
294  frame->pts = info->presentationTimeUs;
295  }
296 #if FF_API_PKT_PTS
298  frame->pkt_pts = frame->pts;
300 #endif
301  frame->pkt_dts = AV_NOPTS_VALUE;
302  frame->color_range = avctx->color_range;
303  frame->color_primaries = avctx->color_primaries;
304  frame->color_trc = avctx->color_trc;
305  frame->colorspace = avctx->colorspace;
306 
307  buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
308  if (!buffer) {
309  ret = AVERROR(ENOMEM);
310  goto fail;
311  }
312 
313  atomic_init(&buffer->released, 0);
314 
315  frame->buf[0] = av_buffer_create(NULL,
316  0,
318  buffer,
320 
321  if (!frame->buf[0]) {
322  ret = AVERROR(ENOMEM);
323  goto fail;
324 
325  }
326 
327  buffer->ctx = s;
328  buffer->serial = atomic_load(&s->serial);
329  if (s->delay_flush)
331 
332  buffer->index = index;
333  buffer->pts = info->presentationTimeUs;
334 
335  frame->data[3] = (uint8_t *)buffer;
336 
338  av_log(avctx, AV_LOG_DEBUG,
339  "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
340  buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));
341 
342  return 0;
343 fail:
344  av_freep(buffer);
345  av_buffer_unref(&frame->buf[0]);
346  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
347  if (status < 0) {
348  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
349  ret = AVERROR_EXTERNAL;
350  }
351 
352  return ret;
353 }
354 
357  uint8_t *data,
358  size_t size,
359  ssize_t index,
361  AVFrame *frame)
362 {
363  int ret = 0;
364  int status = 0;
365 
366  frame->width = avctx->width;
367  frame->height = avctx->height;
368  frame->format = avctx->pix_fmt;
369 
370  /* MediaCodec buffers needs to be copied to our own refcounted buffers
371  * because the flush command invalidates all input and output buffers.
372  */
373  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
374  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
375  goto done;
376  }
377 
378  /* Override frame->pkt_pts as ff_get_buffer will override its value based
379  * on the last avpacket received which is not in sync with the frame:
380  * * N avpackets can be pushed before 1 frame is actually returned
381  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
382  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
383  frame->pts = av_rescale_q(info->presentationTimeUs,
385  avctx->pkt_timebase);
386  } else {
387  frame->pts = info->presentationTimeUs;
388  }
389 #if FF_API_PKT_PTS
391  frame->pkt_pts = frame->pts;
393 #endif
394  frame->pkt_dts = AV_NOPTS_VALUE;
395 
396  av_log(avctx, AV_LOG_TRACE,
397  "Frame: width=%d stride=%d height=%d slice-height=%d "
398  "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
399  "destination linesizes=%d,%d,%d\n" ,
400  avctx->width, s->stride, avctx->height, s->slice_height,
402  frame->linesize[0], frame->linesize[1], frame->linesize[2]);
403 
404  switch (s->color_format) {
406  ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
407  break;
411  ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
412  break;
415  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
416  break;
419  break;
420  default:
421  av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
422  s->color_format, s->color_format);
423  ret = AVERROR(EINVAL);
424  goto done;
425  }
426 
427  ret = 0;
428 done:
429  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
430  if (status < 0) {
431  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
432  ret = AVERROR_EXTERNAL;
433  }
434 
435  return ret;
436 }
437 
/**
 * Fetch an int32 field from the decoder's output MediaFormat.
 *
 * Reads @p key from s->format into @p name. When @p mandatory is non-zero
 * and the key is missing, logs an error, sets `ret` to AVERROR_EXTERNAL and
 * jumps to the caller's `fail` label. Relies on `avctx`, `s`, `format`,
 * `ret` and a `fail` label being in scope at the expansion site.
 */
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do {                              \
    int32_t value = 0;                                                                 \
    if (ff_AMediaFormat_getInt32(s->format, key, &value)) {                            \
        (name) = value;                                                                \
    } else if (mandatory) {                                                            \
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
        ret = AVERROR_EXTERNAL;                                                        \
        goto fail;                                                                     \
    }                                                                                  \
} while (0)
448 
450 {
451  int ret = 0;
452  int width = 0;
453  int height = 0;
454  int color_range = 0;
455  int color_standard = 0;
456  int color_transfer = 0;
457  char *format = NULL;
458 
459  if (!s->format) {
460  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
461  return AVERROR(EINVAL);
462  }
463 
464  format = ff_AMediaFormat_toString(s->format);
465  if (!format) {
466  return AVERROR_EXTERNAL;
467  }
468  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
469 
470  /* Mandatory fields */
471  AMEDIAFORMAT_GET_INT32(s->width, "width", 1);
472  AMEDIAFORMAT_GET_INT32(s->height, "height", 1);
473 
474  AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
475  s->stride = s->stride > 0 ? s->stride : s->width;
476 
477  AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);
478 
479  if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
480  s->slice_height = FFALIGN(s->height, 16);
481  } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
482  s->slice_height = avctx->height;
483  s->stride = avctx->width;
484  } else if (s->slice_height == 0) {
485  s->slice_height = s->height;
486  }
487 
488  AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
489  avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
490  if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
491  av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
492  ret = AVERROR(EINVAL);
493  goto fail;
494  }
495 
496  /* Optional fields */
497  AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0);
498  AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
499  AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0);
500  AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0);
501 
502  width = s->crop_right + 1 - s->crop_left;
503  height = s->crop_bottom + 1 - s->crop_top;
504 
505  AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0);
506  AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);
507 
508  if (s->display_width && s->display_height) {
509  AVRational sar = av_div_q(
511  (AVRational){ width, height });
512  ff_set_sar(avctx, sar);
513  }
514 
515  AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
516  if (color_range)
517  avctx->color_range = mcdec_get_color_range(color_range);
518 
519  AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
520  if (color_standard) {
521  avctx->colorspace = mcdec_get_color_space(color_standard);
522  avctx->color_primaries = mcdec_get_color_pri(color_standard);
523  }
524 
525  AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
526  if (color_transfer)
527  avctx->color_trc = mcdec_get_color_trc(color_transfer);
528 
529  av_log(avctx, AV_LOG_INFO,
530  "Output crop parameters top=%d bottom=%d left=%d right=%d, "
531  "resulting dimensions width=%d height=%d\n",
532  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
533  width, height);
534 
535  av_freep(&format);
536  return ff_set_dimensions(avctx, width, height);
537 fail:
538  av_freep(&format);
539  return ret;
540 }
541 
543 {
544  FFAMediaCodec *codec = s->codec;
545  int status;
546 
547  s->output_buffer_count = 0;
548 
549  s->draining = 0;
550  s->flushing = 0;
551  s->eos = 0;
552  atomic_fetch_add(&s->serial, 1);
554  s->current_input_buffer = -1;
555 
556  status = ff_AMediaCodec_flush(codec);
557  if (status < 0) {
558  av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
559  return AVERROR_EXTERNAL;
560  }
561 
562  return 0;
563 }
564 
566  const char *mime, FFAMediaFormat *format)
567 {
568  int ret = 0;
569  int status;
570  int profile;
571 
572  enum AVPixelFormat pix_fmt;
573  static const enum AVPixelFormat pix_fmts[] = {
576  };
577 
578  s->avctx = avctx;
579  atomic_init(&s->refcount, 1);
581  atomic_init(&s->serial, 1);
582  s->current_input_buffer = -1;
583 
584  pix_fmt = ff_get_format(avctx, pix_fmts);
585  if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
586  AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
587 
588  if (avctx->hw_device_ctx) {
589  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
590  if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
591  if (device_ctx->hwctx) {
592  AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
593  s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, avctx);
594  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
595  }
596  }
597  }
598 
599  if (!s->surface && user_ctx && user_ctx->surface) {
600  s->surface = ff_mediacodec_surface_ref(user_ctx->surface, avctx);
601  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
602  }
603  }
604 
606  if (profile < 0) {
607  av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
608  }
609 
610  s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
611  if (!s->codec_name) {
612  ret = AVERROR_EXTERNAL;
613  goto fail;
614  }
615 
616  av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
618  if (!s->codec) {
619  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
620  ret = AVERROR_EXTERNAL;
621  goto fail;
622  }
623 
624  status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
625  if (status < 0) {
626  char *desc = ff_AMediaFormat_toString(format);
627  av_log(avctx, AV_LOG_ERROR,
628  "Failed to configure codec %s (status = %d) with format %s\n",
629  s->codec_name, status, desc);
630  av_freep(&desc);
631 
632  ret = AVERROR_EXTERNAL;
633  goto fail;
634  }
635 
636  status = ff_AMediaCodec_start(s->codec);
637  if (status < 0) {
638  char *desc = ff_AMediaFormat_toString(format);
639  av_log(avctx, AV_LOG_ERROR,
640  "Failed to start codec %s (status = %d) with format %s\n",
641  s->codec_name, status, desc);
642  av_freep(&desc);
643  ret = AVERROR_EXTERNAL;
644  goto fail;
645  }
646 
648  if (s->format) {
649  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
650  av_log(avctx, AV_LOG_ERROR,
651  "Failed to configure context\n");
652  goto fail;
653  }
654  }
655 
656  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);
657 
658  return 0;
659 
660 fail:
661  av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
662  ff_mediacodec_dec_close(avctx, s);
663  return ret;
664 }
665 
667  AVPacket *pkt, bool wait)
668 {
669  int offset = 0;
670  int need_draining = 0;
671  uint8_t *data;
672  size_t size;
673  FFAMediaCodec *codec = s->codec;
674  int status;
675  int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
676  int64_t pts;
677 
678  if (s->flushing) {
679  av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
680  "until all output buffers have been released\n");
681  return AVERROR_EXTERNAL;
682  }
683 
684  if (pkt->size == 0) {
685  need_draining = 1;
686  }
687 
688  if (s->draining && s->eos) {
689  return AVERROR_EOF;
690  }
691 
692  while (offset < pkt->size || (need_draining && !s->draining)) {
693  ssize_t index = s->current_input_buffer;
694  if (index < 0) {
695  index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
696  if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
697  av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
698  break;
699  }
700 
701  if (index < 0) {
702  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
703  return AVERROR_EXTERNAL;
704  }
705  }
706  s->current_input_buffer = -1;
707 
708  data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
709  if (!data) {
710  av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
711  return AVERROR_EXTERNAL;
712  }
713 
714  pts = pkt->pts;
715  if (pts == AV_NOPTS_VALUE) {
716  av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
717  pts = 0;
718  }
719  if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
720  pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
721  }
722 
723  if (need_draining) {
725 
726  av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");
727 
728  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
729  if (status < 0) {
730  av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
731  return AVERROR_EXTERNAL;
732  }
733 
734  av_log(avctx, AV_LOG_TRACE,
735  "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);
736 
737  s->draining = 1;
738  return 0;
739  }
740 
741  size = FFMIN(pkt->size - offset, size);
742  memcpy(data, pkt->data + offset, size);
743  offset += size;
744 
745  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
746  if (status < 0) {
747  av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
748  return AVERROR_EXTERNAL;
749  }
750 
751  av_log(avctx, AV_LOG_TRACE,
752  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
753  }
754 
755  if (offset == 0)
756  return AVERROR(EAGAIN);
757  return offset;
758 }
759 
761  AVFrame *frame, bool wait)
762 {
763  int ret;
764  uint8_t *data;
765  ssize_t index;
766  size_t size;
767  FFAMediaCodec *codec = s->codec;
769  int status;
770  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
771 
772  if (s->draining && s->eos) {
773  return AVERROR_EOF;
774  }
775 
776  if (s->draining) {
777  /* If the codec is flushing or need to be flushed, block for a fair
778  * amount of time to ensure we got a frame */
779  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
780  } else if (s->output_buffer_count == 0 || !wait) {
781  /* If the codec hasn't produced any frames, do not block so we
782  * can push data to it as fast as possible, and get the first
783  * frame */
784  output_dequeue_timeout_us = 0;
785  }
786 
787  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
788  if (index >= 0) {
789  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
790  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
791  " flags=%" PRIu32 "\n", index, info.offset, info.size,
792  info.presentationTimeUs, info.flags);
793 
795  s->eos = 1;
796  }
797 
798  if (info.size) {
799  if (s->surface) {
800  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
801  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
802  return ret;
803  }
804  } else {
805  data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
806  if (!data) {
807  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
808  return AVERROR_EXTERNAL;
809  }
810 
811  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
812  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
813  return ret;
814  }
815  }
816 
817  s->output_buffer_count++;
818  return 0;
819  } else {
820  status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
821  if (status < 0) {
822  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
823  }
824  }
825 
826  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
827  char *format = NULL;
828 
829  if (s->format) {
830  status = ff_AMediaFormat_delete(s->format);
831  if (status < 0) {
832  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
833  }
834  }
835 
837  if (!s->format) {
838  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
839  return AVERROR_EXTERNAL;
840  }
841 
842  format = ff_AMediaFormat_toString(s->format);
843  if (!format) {
844  return AVERROR_EXTERNAL;
845  }
846  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
847  av_freep(&format);
848 
849  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
850  return ret;
851  }
852 
853  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
855  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
856  if (s->draining) {
857  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
858  "while draining remaining frames, output will probably lack frames\n",
859  output_dequeue_timeout_us / 1000);
860  } else {
861  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
862  }
863  } else {
864  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
865  return AVERROR_EXTERNAL;
866  }
867 
868  return AVERROR(EAGAIN);
869 }
870 
871 /*
872 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
873 * the codec (because the user retains frames). The codec stays in the
874 * flushing state.
875 *
876 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
877 * performed on the codec. The codec leaves the flushing state and can
878 * process again packets.
879 *
880 * ff_mediacodec_dec_flush returns a negative value if an error has
881 * occurred.
882 */
884 {
885  if (!s->surface || atomic_load(&s->refcount) == 1) {
886  int ret;
887 
888  /* No frames (holding a reference to the codec) are retained by the
889  * user, thus we can flush the codec and returns accordingly */
890  if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
891  return ret;
892  }
893 
894  return 1;
895  }
896 
897  s->flushing = 1;
898  return 0;
899 }
900 
902 {
904 
905  return 0;
906 }
907 
909 {
910  return s->flushing;
911 }
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
Definition: pixfmt.h:514
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
This structure holds a reference to a android/view/Surface object that will be used as output by the ...
Definition: mediacodec.h:33
#define NULL
Definition: coverity.c:32
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1292
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory)
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it...
Definition: buffer.c:125
This structure describes decoded (raw) audio or video data.
Definition: frame.h:308
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:100
"Linear transfer characteristics"
Definition: pixfmt.h:492
const char * desc
Definition: libsvtav1.c:79
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
char * ff_AMediaCodecList_getCodecNameByType(const char *mime, int profile, int encoder, void *log_ctx)
Memory handling functions.
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:106
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:499
static enum AVColorPrimaries mcdec_get_color_pri(int color_standard)
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601 ...
Definition: pixfmt.h:518
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:1161
int num
Numerator.
Definition: rational.h:59
int size
Definition: packet.h:364
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel...
Definition: avcodec.h:905
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:519
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:736
void ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
color_range
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
static AVPacket pkt
void ff_mediacodec_sw_buffer_copy_yuv420_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
The code handling the various YUV color formats is taken from the GStreamer project.
int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s, AVPacket *pkt, bool wait)
int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate.The lists are not just lists
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:483
int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
int ff_AMediaCodec_flush(FFAMediaCodec *codec)
int ff_AMediaCodec_releaseOutputBuffer(FFAMediaCodec *codec, size_t idx, int render)
uint8_t
void * hwaccel_context
Hardware accelerator context.
Definition: avcodec.h:1702
timestamp utils, mostly useful for debugging/logging purposes
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:512
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:220
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
int ff_AMediaCodec_infoOutputBuffersChanged(FFAMediaCodec *codec, ssize_t idx)
FFANativeWindow * ff_mediacodec_surface_ref(void *surface, void *log_ctx)
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:401
static enum AVColorRange mcdec_get_color_range(int color_range)
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
void * surface
android/view/Surface handle, to be filled by the user.
#define height
int ff_AMediaCodec_infoOutputFormatChanged(FFAMediaCodec *codec, ssize_t idx)
uint8_t * data
Definition: packet.h:363
static const struct @107 color_formats[]
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:121
#define AVERROR_EOF
End of file.
Definition: error.h:55
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:113
char * ff_AMediaFormat_toString(FFAMediaFormat *format)
AVColorRange
MPEG vs JPEG YUV range.
Definition: pixfmt.h:535
ptrdiff_t size
Definition: opengl_enc.c:100
int color_format
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:458
static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
#define FFALIGN(x, a)
Definition: macros.h:48
#define av_log(a,...)
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
AVRational pkt_timebase
Timebase in which pkt_dts/pts and AVPacket.dts/pts are.
Definition: avcodec.h:2083
int width
Definition: frame.h:366
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
#define atomic_load(object)
Definition: stdatomic.h:93
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:552
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
uint8_t * ff_AMediaCodec_getInputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
int ff_mediacodec_surface_unref(FFANativeWindow *window, void *log_ctx)
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:563
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B
Definition: pixfmt.h:460
AVBufferRef * av_buffer_create(uint8_t *data, int size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:29
#define atomic_fetch_sub(object, operand)
Definition: stdatomic.h:137
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
#define fail()
Definition: checkasm.h:123
int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s, AVFrame *frame, bool wait)
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
MIPS optimizations info
Definition: mips.txt:2
int ff_AMediaCodec_queueInputBuffer(FFAMediaCodec *codec, size_t idx, off_t offset, size_t size, uint64_t time, uint32_t flags)
#define FFMIN(a, b)
Definition: common.h:96
ssize_t ff_AMediaCodec_dequeueOutputBuffer(FFAMediaCodec *codec, FFAMediaCodecBufferInfo *info, int64_t timeoutUs)
#define width
int width
picture width / height.
Definition: avcodec.h:699
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
int ff_AMediaCodec_delete(FFAMediaCodec *codec)
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:523
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:465
AVFormatContext * ctx
Definition: movenc.c:48
int ff_AMediaCodec_getBufferFlagEndOfStream(FFAMediaCodec *codec)
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:1140
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
#define s(width, name)
Definition: cbs_vp9.c:257
static enum AVColorTransferCharacteristic mcdec_get_color_trc(int color_transfer)
#define FF_ARRAY_ELEMS(a)
the normal 2^n-1 "JPEG" YUV ranges
Definition: pixfmt.h:538
ssize_t ff_AMediaCodec_dequeueInputBuffer(FFAMediaCodec *codec, int64_t timeoutUs)
FFAMediaCodec * ff_AMediaCodec_createCodecByName(const char *name)
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames...
Definition: frame.h:381
#define atomic_fetch_add(object, operand)
Definition: stdatomic.h:131
#define INPUT_DEQUEUE_TIMEOUT_US
OMX.k3.video.decoder.avc, OMX.NVIDIA.
#define AV_LOG_INFO
Standard information.
Definition: log.h:205
int ff_AMediaCodec_cleanOutputBuffers(FFAMediaCodec *codec)
also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
Definition: pixfmt.h:490
int ff_AMediaCodec_start(FFAMediaCodec *codec)
Libavcodec external API header.
#define AV_TIME_BASE_Q
Internal time base represented as fractional value.
Definition: avutil.h:260
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:339
main external API structure.
Definition: avcodec.h:526
uint8_t * data
The data buffer.
Definition: buffer.h:89
int ff_AMediaCodecProfile_getProfileFromAVCodecContext(AVCodecContext *avctx)
The following API around MediaCodec and MediaFormat is based on the NDK one provided by Google since ...
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:396
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1872
int index
Definition: gxfenc.c:89
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:1154
Rational number (pair of numerator and denominator).
Definition: rational.h:58
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:1147
static void mediacodec_buffer_release(void *opaque, uint8_t *data)
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
int ff_AMediaCodec_infoTryAgainLater(FFAMediaCodec *codec, ssize_t idx)
static enum AVColorSpace mcdec_get_color_space(int color_standard)
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:300
int ff_AMediaFormat_delete(FFAMediaFormat *format)
mfxU16 profile
Definition: qsvenc.c:45
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US
hardware decoding through MediaCodec
Definition: pixfmt.h:293
static int64_t pts
#define flags(name, subs,...)
Definition: cbs_av1.c:560
uint8_t * ff_AMediaCodec_getOutputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:322
attribute_deprecated int64_t pkt_pts
PTS copied from the AVPacket that was decoded to produce this frame.
Definition: frame.h:409
the normal 219*2^(n-8) "MPEG" YUV ranges
Definition: pixfmt.h:537
int64_t pkt_dts
DTS copied from the AVPacket that triggered returning this frame.
Definition: frame.h:417
int ff_AMediaCodec_configure(FFAMediaCodec *codec, const FFAMediaFormat *format, void *surface, void *crypto, uint32_t flags)
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:84
common internal api header.
common internal and external API header
static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
int den
Denominator.
Definition: rational.h:60
int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx, MediaCodecDecContext *s, int color_format)
ARIB STD-B67, known as "Hybrid log-gamma".
Definition: pixfmt.h:504
pixel format definitions
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:85
static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
#define OUTPUT_DEQUEUE_TIMEOUT_US
FFAMediaFormat * ff_AMediaCodec_getOutputFormat(FFAMediaCodec *codec)
enum AVColorPrimaries color_primaries
Definition: frame.h:554
void * surface
android/view/Surface object reference.
Definition: mediacodec.h:38
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:466
ITU-R BT2020.
Definition: pixfmt.h:469
int height
Definition: frame.h:366
#define atomic_init(obj, value)
Definition: stdatomic.h:33
#define av_freep(p)
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:556
static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
enum AVPixelFormat pix_fmt
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Frame references ownership and permissions
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:2278
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:57
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
This structure stores compressed data.
Definition: packet.h:340
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:356
int i
Definition: input.c:407
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
GLuint buffer
Definition: opengl_enc.c:101