mediacodecdec_common.c
1 /*
2  * Android MediaCodec decoder
3  *
4  * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <string.h>
24 #include <sys/types.h>
25 
26 #include "libavutil/common.h"
27 #include "libavutil/hwcontext_mediacodec.h"
28 #include "libavutil/mem.h"
29 #include "libavutil/log.h"
30 #include "libavutil/pixfmt.h"
31 #include "libavutil/time.h"
32 #include "libavutil/timestamp.h"
33 
34 #include "avcodec.h"
35 #include "internal.h"
36 
37 #include "mediacodec.h"
38 #include "mediacodec_surface.h"
39 #include "mediacodec_sw_buffer.h"
40 #include "mediacodec_wrapper.h"
41 #include "mediacodecdec_common.h"
42 
43 /**
44  * OMX.k3.video.decoder.avc, OMX.NVIDIA.*, OMX.SEC.avc.dec and OMX.google
45  * codec workarounds used in various places are taken from the Gstreamer
46  * project.
47  *
48  * Gstreamer references:
49  * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
50  *
51  * Gstreamer copyright notice:
52  *
53  * Copyright (C) 2012, Collabora Ltd.
54  * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
55  *
56  * Copyright (C) 2012, Rafaël Carré <funman@videolan.org>
57  *
58  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
59  *
60  * Copyright (C) 2014-2015, Collabora Ltd.
61  * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
62  *
63  * Copyright (C) 2015, Edward Hervey
64  * Author: Edward Hervey <bilboed@gmail.com>
65  *
66  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
67  *
68  * This library is free software; you can redistribute it and/or
69  * modify it under the terms of the GNU Lesser General Public
70  * License as published by the Free Software Foundation
71  * version 2.1 of the License.
72  *
73  * This library is distributed in the hope that it will be useful,
74  * but WITHOUT ANY WARRANTY; without even the implied warranty of
75  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
76  * Lesser General Public License for more details.
77  *
78  * You should have received a copy of the GNU Lesser General Public
79  * License along with this library; if not, write to the Free Software
80  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
81  *
82  */
83 
84 #define INPUT_DEQUEUE_TIMEOUT_US 8000
85 #define OUTPUT_DEQUEUE_TIMEOUT_US 8000
86 #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000
87 
88 enum {
89  COLOR_FormatYUV420Planar = 0x13,
90  COLOR_FormatYUV420SemiPlanar = 0x15,
91  COLOR_FormatYCbYCr = 0x19,
92  COLOR_FormatAndroidOpaque = 0x7F000789,
93  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00,
94  COLOR_QCOM_FormatYUV420SemiPlanar32m = 0x7fa30c04,
95  COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03,
96  COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100,
97  COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced = 0x7f000001,
98 };
99 
100 static const struct {
101 
102  int color_format;
103  enum AVPixelFormat pix_fmt;
104 
105 } color_formats[] = {
106 
107  { COLOR_FormatYUV420Planar, AV_PIX_FMT_YUV420P },
108  { COLOR_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
109  { COLOR_QCOM_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
110  { COLOR_QCOM_FormatYUV420SemiPlanar32m, AV_PIX_FMT_NV12 },
111  { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12 },
112  { COLOR_TI_FormatYUV420PackedSemiPlanar, AV_PIX_FMT_NV12 },
113  { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced, AV_PIX_FMT_NV12 },
114  { 0 }
115 };
116 
117 static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx,
118  MediaCodecDecContext *s,
119  int color_format)
120 {
121  int i;
122  enum AVPixelFormat ret = AV_PIX_FMT_NONE;
123 
124  if (s->surface) {
125  return AV_PIX_FMT_MEDIACODEC;
126  }
127 
128  if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
129  s->color_format = color_format = COLOR_FormatYUV420SemiPlanar;
130  }
131 
132  for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
133  if (color_formats[i].color_format == color_format) {
134  return color_formats[i].pix_fmt;
135  }
136  }
137 
138  av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
139  color_format, color_format);
140 
141  return ret;
142 }
143 
144 static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
145 {
146  atomic_fetch_add(&s->refcount, 1);
147 }
148 
149 static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
150 {
151  if (!s)
152  return;
153 
154  if (atomic_fetch_sub(&s->refcount, 1) == 1) {
155  if (s->codec) {
156  ff_AMediaCodec_delete(s->codec);
157  s->codec = NULL;
158  }
159 
160  if (s->format) {
161  ff_AMediaFormat_delete(s->format);
162  s->format = NULL;
163  }
164 
165  if (s->surface) {
166  ff_mediacodec_surface_unref(s->surface, NULL);
167  s->surface = NULL;
168  }
169 
170  av_freep(&s->codec_name);
171  av_freep(&s);
172  }
173 }
174 
175 static void mediacodec_buffer_release(void *opaque, uint8_t *data)
176 {
177  AVMediaCodecBuffer *buffer = opaque;
178  MediaCodecDecContext *ctx = buffer->ctx;
179  int released = atomic_load(&buffer->released);
180 
181  if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
182  atomic_fetch_sub(&ctx->hw_buffer_count, 1);
183  av_log(ctx->avctx, AV_LOG_DEBUG,
184  "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
185  buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
186  ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
187  }
188 
189  if (ctx->delay_flush)
190  ff_mediacodec_dec_unref(ctx);
191  av_freep(&buffer);
192 }
193 
194 static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx,
195  MediaCodecDecContext *s,
196  ssize_t index,
197  FFAMediaCodecBufferInfo *info,
198  AVFrame *frame)
199 {
200  int ret = 0;
201  int status = 0;
202  AVMediaCodecBuffer *buffer = NULL;
203 
204  frame->buf[0] = NULL;
205  frame->width = avctx->width;
206  frame->height = avctx->height;
207  frame->format = avctx->pix_fmt;
208  frame->sample_aspect_ratio = avctx->sample_aspect_ratio;
209 
210  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
211  frame->pts = av_rescale_q(info->presentationTimeUs,
212  AV_TIME_BASE_Q,
213  avctx->pkt_timebase);
214  } else {
215  frame->pts = info->presentationTimeUs;
216  }
217 #if FF_API_PKT_PTS
218 FF_DISABLE_DEPRECATION_WARNINGS
219  frame->pkt_pts = frame->pts;
220 FF_ENABLE_DEPRECATION_WARNINGS
221 #endif
222  frame->pkt_dts = AV_NOPTS_VALUE;
223 
224  buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
225  if (!buffer) {
226  ret = AVERROR(ENOMEM);
227  goto fail;
228  }
229 
230  atomic_init(&buffer->released, 0);
231 
232  frame->buf[0] = av_buffer_create(NULL,
233  0,
234  mediacodec_buffer_release,
235  buffer,
236  AV_BUFFER_FLAG_READONLY);
237 
238  if (!frame->buf[0]) {
239  ret = AVERROR(ENOMEM);
240  goto fail;
241 
242  }
243 
244  buffer->ctx = s;
245  buffer->serial = atomic_load(&s->serial);
246  if (s->delay_flush)
247  ff_mediacodec_dec_ref(s);
248 
249  buffer->index = index;
250  buffer->pts = info->presentationTimeUs;
251 
252  frame->data[3] = (uint8_t *)buffer;
253 
254  atomic_fetch_add(&s->hw_buffer_count, 1);
255  av_log(avctx, AV_LOG_DEBUG,
256  "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
257  buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));
258 
259  return 0;
260 fail:
261  av_freep(buffer);
262  av_buffer_unref(&frame->buf[0]);
263  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
264  if (status < 0) {
265  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
266  ret = AVERROR_EXTERNAL;
267  }
268 
269  return ret;
270 }
271 
272 static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx,
273  MediaCodecDecContext *s,
274  uint8_t *data,
275  size_t size,
276  ssize_t index,
277  FFAMediaCodecBufferInfo *info,
278  AVFrame *frame)
279 {
280  int ret = 0;
281  int status = 0;
282 
283  frame->width = avctx->width;
284  frame->height = avctx->height;
285  frame->format = avctx->pix_fmt;
286 
287  /* MediaCodec buffers need to be copied to our own refcounted buffers
288  * because the flush command invalidates all input and output buffers.
289  */
290  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
291  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
292  goto done;
293  }
294 
295  /* Override frame->pkt_pts as ff_get_buffer will override its value based
296  * on the last avpacket received, which is not in sync with the frame:
297  * * N avpackets can be pushed before 1 frame is actually returned
298  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
299  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
300  frame->pts = av_rescale_q(info->presentationTimeUs,
301  AV_TIME_BASE_Q,
302  avctx->pkt_timebase);
303  } else {
304  frame->pts = info->presentationTimeUs;
305  }
306 #if FF_API_PKT_PTS
307 FF_DISABLE_DEPRECATION_WARNINGS
308  frame->pkt_pts = frame->pts;
309 FF_ENABLE_DEPRECATION_WARNINGS
310 #endif
311  frame->pkt_dts = AV_NOPTS_VALUE;
312 
313  av_log(avctx, AV_LOG_TRACE,
314  "Frame: width=%d stride=%d height=%d slice-height=%d "
315  "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
316  "destination linesizes=%d,%d,%d\n" ,
317  avctx->width, s->stride, avctx->height, s->slice_height,
318  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
319  frame->linesize[0], frame->linesize[1], frame->linesize[2]);
320 
321  switch (s->color_format) {
322  case COLOR_FormatYUV420Planar:
323  ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
324  break;
325  case COLOR_FormatYUV420SemiPlanar:
326  case COLOR_QCOM_FormatYUV420SemiPlanar:
327  case COLOR_QCOM_FormatYUV420SemiPlanar32m:
328  ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
329  break;
330  case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:
331  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data,
332  size, info, frame);
333  break;
334  case COLOR_TI_FormatYUV420PackedSemiPlanar:
335  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
336  break;
337  default:
338  av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
339  s->color_format, s->color_format);
340  ret = AVERROR(EINVAL);
341  goto done;
342  }
343 
344  ret = 0;
345 done:
346  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
347  if (status < 0) {
348  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
349  ret = AVERROR_EXTERNAL;
350  }
351 
352  return ret;
353 }
354 
355 #define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do { \
356  int32_t value = 0; \
357  if (ff_AMediaFormat_getInt32(s->format, key, &value)) { \
358  (name) = value; \
359  } else if (mandatory) { \
360  av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
361  ret = AVERROR_EXTERNAL; \
362  goto fail; \
363  } \
364 } while (0) \
365 
366 static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
367 {
368  int ret = 0;
369  int width = 0;
370  int height = 0;
371  char *format = NULL;
372 
373  if (!s->format) {
374  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
375  return AVERROR(EINVAL);
376  }
377 
378  format = ff_AMediaFormat_toString(s->format);
379  if (!format) {
380  return AVERROR_EXTERNAL;
381  }
382  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
383 
384  /* Mandatory fields */
385  AMEDIAFORMAT_GET_INT32(s->width, "width", 1);
386  AMEDIAFORMAT_GET_INT32(s->height, "height", 1);
387 
388  AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
389  s->stride = s->stride > 0 ? s->stride : s->width;
390 
391  AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);
392 
393  if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
394  s->slice_height = FFALIGN(s->height, 16);
395  } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
396  s->slice_height = avctx->height;
397  s->stride = avctx->width;
398  } else if (s->slice_height == 0) {
399  s->slice_height = s->height;
400  }
401 
402  AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
403  avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
404  if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
405  av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
406  ret = AVERROR(EINVAL);
407  goto fail;
408  }
409 
410  /* Optional fields */
411  AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0);
412  AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
413  AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0);
414  AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0);
415 
416  width = s->crop_right + 1 - s->crop_left;
417  height = s->crop_bottom + 1 - s->crop_top;
418 
419  AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0);
420  AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);
421 
422  if (s->display_width && s->display_height) {
423  AVRational sar = av_div_q(
424  (AVRational){ s->display_width, s->display_height },
425  (AVRational){ width, height });
426  ff_set_sar(avctx, sar);
427  }
428 
429  av_log(avctx, AV_LOG_INFO,
430  "Output crop parameters top=%d bottom=%d left=%d right=%d, "
431  "resulting dimensions width=%d height=%d\n",
432  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
433  width, height);
434 
435  av_freep(&format);
436  return ff_set_dimensions(avctx, width, height);
437 fail:
438  av_freep(&format);
439  return ret;
440 }
441 
442 static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
443 {
444  FFAMediaCodec *codec = s->codec;
445  int status;
446 
447  s->output_buffer_count = 0;
448 
449  s->draining = 0;
450  s->flushing = 0;
451  s->eos = 0;
452  atomic_fetch_add(&s->serial, 1);
453  atomic_init(&s->hw_buffer_count, 0);
454  s->current_input_buffer = -1;
455 
456  status = ff_AMediaCodec_flush(codec);
457  if (status < 0) {
458  av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
459  return AVERROR_EXTERNAL;
460  }
461 
462  return 0;
463 }
464 
465 int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s,
466  const char *mime, FFAMediaFormat *format)
467 {
468  int ret = 0;
469  int status;
470  int profile;
471 
472  enum AVPixelFormat pix_fmt;
473  static const enum AVPixelFormat pix_fmts[] = {
474  AV_PIX_FMT_MEDIACODEC,
475  AV_PIX_FMT_NONE,
476  };
477 
478  s->avctx = avctx;
479  atomic_init(&s->refcount, 1);
480  atomic_init(&s->hw_buffer_count, 0);
481  atomic_init(&s->serial, 1);
482  s->current_input_buffer = -1;
483 
484  pix_fmt = ff_get_format(avctx, pix_fmts);
485  if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
486  AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
487 
488  if (avctx->hw_device_ctx) {
489  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
490  if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
491  if (device_ctx->hwctx) {
492  AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
493  s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, avctx);
494  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
495  }
496  }
497  }
498 
499  if (!s->surface && user_ctx && user_ctx->surface) {
500  s->surface = ff_mediacodec_surface_ref(user_ctx->surface, avctx);
501  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
502  }
503  }
504 
505  profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx);
506  if (profile < 0) {
507  av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
508  }
509 
510  s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
511  if (!s->codec_name) {
512  ret = AVERROR_EXTERNAL;
513  goto fail;
514  }
515 
516  av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
517  s->codec = ff_AMediaCodec_createCodecByName(s->codec_name);
518  if (!s->codec) {
519  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
520  ret = AVERROR_EXTERNAL;
521  goto fail;
522  }
523 
524  status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
525  if (status < 0) {
526  char *desc = ff_AMediaFormat_toString(format);
527  av_log(avctx, AV_LOG_ERROR,
528  "Failed to configure codec (status = %d) with format %s\n",
529  status, desc);
530  av_freep(&desc);
531 
532  ret = AVERROR_EXTERNAL;
533  goto fail;
534  }
535 
536  status = ff_AMediaCodec_start(s->codec);
537  if (status < 0) {
538  char *desc = ff_AMediaFormat_toString(format);
539  av_log(avctx, AV_LOG_ERROR,
540  "Failed to start codec (status = %d) with format %s\n",
541  status, desc);
542  av_freep(&desc);
543  ret = AVERROR_EXTERNAL;
544  goto fail;
545  }
546 
547  s->format = ff_AMediaCodec_getOutputFormat(s->codec);
548  if (s->format) {
549  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
550  av_log(avctx, AV_LOG_ERROR,
551  "Failed to configure context\n");
552  goto fail;
553  }
554  }
555 
556  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);
557 
558  return 0;
559 
560 fail:
561  av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
562  ff_mediacodec_dec_close(avctx, s);
563  return ret;
564 }
565 
566 int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s,
567  AVPacket *pkt, bool wait)
568 {
569  int offset = 0;
570  int need_draining = 0;
571  uint8_t *data;
572  ssize_t index = s->current_input_buffer;
573  size_t size;
574  FFAMediaCodec *codec = s->codec;
575  int status;
576  int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
577  int64_t pts;
578 
579  if (s->flushing) {
580  av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
581  "until all output buffers have been released\n");
582  return AVERROR_EXTERNAL;
583  }
584 
585  if (pkt->size == 0) {
586  need_draining = 1;
587  }
588 
589  if (s->draining && s->eos) {
590  return AVERROR_EOF;
591  }
592 
593  while (offset < pkt->size || (need_draining && !s->draining)) {
594  if (index < 0) {
595  index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
596  if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
597  av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
598  break;
599  }
600 
601  if (index < 0) {
602  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
603  return AVERROR_EXTERNAL;
604  }
605  }
606  s->current_input_buffer = -1;
607 
608  data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
609  if (!data) {
610  av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
611  return AVERROR_EXTERNAL;
612  }
613 
614  pts = pkt->pts;
615  if (pts != AV_NOPTS_VALUE && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
616  pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
617  }
618 
619  if (need_draining) {
620  uint32_t flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);
621 
622  av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");
623 
624  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
625  if (status < 0) {
626  av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
627  return AVERROR_EXTERNAL;
628  }
629 
630  av_log(avctx, AV_LOG_TRACE,
631  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
632 
633  s->draining = 1;
634  return 0;
635  }
636 
637  size = FFMIN(pkt->size - offset, size);
638  memcpy(data, pkt->data + offset, size);
639  offset += size;
640 
641  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
642  if (status < 0) {
643  av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
644  return AVERROR_EXTERNAL;
645  }
646 
647  av_log(avctx, AV_LOG_TRACE,
648  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
649  }
650 
651  if (offset == 0)
652  return AVERROR(EAGAIN);
653  return offset;
654 }
655 
656 int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s,
657  AVFrame *frame, bool wait)
658 {
659  int ret;
660  uint8_t *data;
661  ssize_t index;
662  size_t size;
663  FFAMediaCodec *codec = s->codec;
664  FFAMediaCodecBufferInfo info = { 0 };
665  int status;
666  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
667 
668  if (s->draining && s->eos) {
669  return AVERROR_EOF;
670  }
671 
672  if (s->draining) {
673  /* If the codec is flushing or needs to be flushed, block for a fair
674  * amount of time to ensure we get a frame */
675  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
676  } else if (s->output_buffer_count == 0 || !wait) {
677  /* If the codec hasn't produced any frames, do not block so we
678  * can push data to it as fast as possible, and get the first
679  * frame */
680  output_dequeue_timeout_us = 0;
681  }
682 
683  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
684  if (index >= 0) {
685  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
686  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
687  " flags=%" PRIu32 "\n", index, info.offset, info.size,
688  info.presentationTimeUs, info.flags);
689 
690  if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
691  s->eos = 1;
692  }
693 
694  if (info.size) {
695  if (s->surface) {
696  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
697  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
698  return ret;
699  }
700  } else {
701  data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
702  if (!data) {
703  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
704  return AVERROR_EXTERNAL;
705  }
706 
707  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
708  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
709  return ret;
710  }
711  }
712 
713  s->output_buffer_count++;
714  return 0;
715  } else {
716  status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
717  if (status < 0) {
718  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
719  }
720  }
721 
722  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
723  char *format = NULL;
724 
725  if (s->format) {
726  status = ff_AMediaFormat_delete(s->format);
727  if (status < 0) {
728  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
729  }
730  }
731 
732  s->format = ff_AMediaCodec_getOutputFormat(codec);
733  if (!s->format) {
734  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
735  return AVERROR_EXTERNAL;
736  }
737 
738  format = ff_AMediaFormat_toString(s->format);
739  if (!format) {
740  return AVERROR_EXTERNAL;
741  }
742  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
743  av_freep(&format);
744 
745  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
746  return ret;
747  }
748 
749  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
750  ff_AMediaCodec_cleanOutputBuffers(codec);
751  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
752  if (s->draining) {
753  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
754  "while draining remaining frames, output will probably lack frames\n",
755  output_dequeue_timeout_us / 1000);
756  } else {
757  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
758  }
759  } else {
760  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
761  return AVERROR_EXTERNAL;
762  }
763 
764  return AVERROR(EAGAIN);
765 }
766 
767 /*
768 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
769 * the codec (because the user retains frames). The codec stays in the
770 * flushing state.
771 *
772 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
773 * performed on the codec. The codec leaves the flushing state and can
774 * process packets again.
775 *
776 * ff_mediacodec_dec_flush returns a negative value if an error has
777 * occurred.
778 */
779 int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
780 {
781  if (!s->surface || atomic_load(&s->refcount) == 1) {
782  int ret;
783 
784  /* No frames (holding a reference to the codec) are retained by the
785  * user, thus we can flush the codec and return accordingly */
786  if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
787  return ret;
788  }
789 
790  return 1;
791  }
792 
793  s->flushing = 1;
794  return 0;
795 }
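/* Illustrative sketch only (not part of this file): one way a wrapper decoder
 * might consume the three return values documented above. The wrapper context
 * name and its ctx field are assumptions for illustration.
 *
 *     ret = ff_mediacodec_dec_flush(avctx, s->ctx);
 *     if (ret < 0)
 *         return ret;   // hard error while flushing
 *     if (ret == 0)
 *         return 0;     // frames still retained by the user: codec stays in
 *                       // the flushing state, keep rejecting new packets
 *     // ret == 1: codec was flushed and left the flushing state,
 *     // packets can be submitted again
 */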
796 
797 int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
798 {
799  ff_mediacodec_dec_unref(s);
800 
801  return 0;
802 }
803 
804 int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
805 {
806  return s->flushing;
807 }
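
The entry points above (ff_mediacodec_dec_init/send/receive/flush/close) are meant to be driven in a send/receive loop by a thin wrapper decoder such as the one in mediacodecdec.c. The sketch below is a simplified, assumption-based illustration of that loop, not code from this file: the function name mediacodec_receive_frame, the MediaCodecWrapperContext type and its ctx field are hypothetical stand-ins, and packet bookkeeping is reduced to a minimum (the real wrapper additionally buffers a packet and resubmits partially consumed data).

/* Hypothetical, simplified caller -- names below are illustrative only. */
static int mediacodec_receive_frame(AVCodecContext *avctx, AVFrame *frame)
{
    MediaCodecWrapperContext *s = avctx->priv_data; /* assumed wrapper context */
    AVPacket pkt = { 0 };
    int ret;

    /* Drain an already decoded frame first, without blocking. */
    ret = ff_mediacodec_dec_receive(avctx, s->ctx, frame, false);
    if (ret != AVERROR(EAGAIN))
        return ret;

    /* Nothing pending: pull the next packet (a 0-sized packet signals EOS). */
    ret = ff_decode_get_packet(avctx, &pkt);
    if (ret < 0 && ret != AVERROR_EOF)
        return ret;

    /* Feed the codec; ff_mediacodec_dec_send() returns the number of bytes
     * consumed, AVERROR(EAGAIN) if no input buffer was available, or an error. */
    ret = ff_mediacodec_dec_send(avctx, s->ctx, &pkt, true);
    av_packet_unref(&pkt);
    if (ret < 0 && ret != AVERROR(EAGAIN))
        return ret;

    /* Now allow the dequeue to block until a frame (or EOF) comes back. */
    return ff_mediacodec_dec_receive(avctx, s->ctx, frame, true);
}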