FFmpeg
 All Data Structures Namespaces Files Functions Variables Typedefs Enumerations Enumerator Macros Groups Pages
libstagefright.cpp
Go to the documentation of this file.
1 /*
2  * Interface to the Android Stagefright library for
3  * H/W accelerated H.264 decoding
4  *
5  * Copyright (C) 2011 Mohamed Naufal
6  * Copyright (C) 2011 Martin Storsjö
7  *
8  * This file is part of FFmpeg.
9  *
10  * FFmpeg is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Lesser General Public
12  * License as published by the Free Software Foundation; either
13  * version 2.1 of the License, or (at your option) any later version.
14  *
15  * FFmpeg is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18  * Lesser General Public License for more details.
19  *
20  * You should have received a copy of the GNU Lesser General Public
21  * License along with FFmpeg; if not, write to the Free Software
22  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23  */
24 
25 #include <binder/ProcessState.h>
26 #include <media/stagefright/MetaData.h>
27 #include <media/stagefright/MediaBufferGroup.h>
28 #include <media/stagefright/MediaDebug.h>
29 #include <media/stagefright/MediaDefs.h>
30 #include <media/stagefright/OMXClient.h>
31 #include <media/stagefright/OMXCodec.h>
32 #include <utils/List.h>
33 #include <new>
34 #include <map>
35 
36 extern "C" {
37 #include "avcodec.h"
38 #include "libavutil/imgutils.h"
39 #include "internal.h"
40 }
41 
42 #define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
43 
44 using namespace android;
45 
46 struct Frame {
47  status_t status;
48  size_t size;
49  int64_t time;
50  int key;
53 };
54 
/**
 * Per-packet metadata saved in ts_map, keyed by the fake kKeyTime index,
 * so the real pts/reordered_opaque can be restored on the matching
 * output frame after the decoder reorders pictures.
 */
struct TimeStamp {
    int64_t pts;
    int64_t reordered_opaque;
};
59 
60 class CustomSource;
61 
67  sp<MediaSource> *source;
68  List<Frame*> *in_queue, *out_queue;
72 
75  volatile sig_atomic_t thread_started, thread_exited, stop_decode;
76 
78  std::map<int64_t, TimeStamp> *ts_map;
79  int64_t frame_index;
80 
83 
84  OMXClient *client;
85  sp<MediaSource> *decoder;
86  const char *decoder_component;
87 };
88 
89 class CustomSource : public MediaSource {
90 public:
91  CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
92  s = (StagefrightContext*)avctx->priv_data;
93  source_meta = meta;
94  frame_size = (avctx->width * avctx->height * 3) / 2;
95  buf_group.add_buffer(new MediaBuffer(frame_size));
96  }
97 
98  virtual sp<MetaData> getFormat() {
99  return source_meta;
100  }
101 
102  virtual status_t start(MetaData *params) {
103  return OK;
104  }
105 
106  virtual status_t stop() {
107  return OK;
108  }
109 
110  virtual status_t read(MediaBuffer **buffer,
111  const MediaSource::ReadOptions *options) {
112  Frame *frame;
113  status_t ret;
114 
115  if (s->thread_exited)
116  return ERROR_END_OF_STREAM;
117  pthread_mutex_lock(&s->in_mutex);
118 
119  while (s->in_queue->empty())
120  pthread_cond_wait(&s->condition, &s->in_mutex);
121 
122  frame = *s->in_queue->begin();
123  ret = frame->status;
124 
125  if (ret == OK) {
126  ret = buf_group.acquire_buffer(buffer);
127  if (ret == OK) {
128  memcpy((*buffer)->data(), frame->buffer, frame->size);
129  (*buffer)->set_range(0, frame->size);
130  (*buffer)->meta_data()->clear();
131  (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
132  (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
133  } else {
134  av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
135  }
136  av_freep(&frame->buffer);
137  }
138 
139  s->in_queue->erase(s->in_queue->begin());
140  pthread_mutex_unlock(&s->in_mutex);
141 
142  av_freep(&frame);
143  return ret;
144  }
145 
146 private:
147  MediaBufferGroup buf_group;
148  sp<MetaData> source_meta;
151 };
152 
153 void* decode_thread(void *arg)
154 {
155  AVCodecContext *avctx = (AVCodecContext*)arg;
157  const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(avctx->pix_fmt);
158  Frame* frame;
159  MediaBuffer *buffer;
160  int32_t w, h;
161  int decode_done = 0;
162  int ret;
163  int src_linesize[3];
164  const uint8_t *src_data[3];
165  int64_t out_frame_index = 0;
166 
167  do {
168  buffer = NULL;
169  frame = (Frame*)av_mallocz(sizeof(Frame));
170  if (!frame) {
171  frame = s->end_frame;
172  frame->status = AVERROR(ENOMEM);
173  decode_done = 1;
174  s->end_frame = NULL;
175  goto push_frame;
176  }
177  frame->status = (*s->decoder)->read(&buffer);
178  if (frame->status == OK) {
179  sp<MetaData> outFormat = (*s->decoder)->getFormat();
180  outFormat->findInt32(kKeyWidth , &w);
181  outFormat->findInt32(kKeyHeight, &h);
182  frame->vframe = av_frame_alloc();
183  if (!frame->vframe) {
184  frame->status = AVERROR(ENOMEM);
185  decode_done = 1;
186  buffer->release();
187  goto push_frame;
188  }
189  ret = ff_get_buffer(avctx, frame->vframe, AV_GET_BUFFER_FLAG_REF);
190  if (ret < 0) {
191  frame->status = ret;
192  decode_done = 1;
193  buffer->release();
194  goto push_frame;
195  }
196 
197  // The OMX.SEC decoder doesn't signal the modified width/height
198  if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
199  (w & 15 || h & 15)) {
200  if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
201  w = (w + 15)&~15;
202  h = (h + 15)&~15;
203  }
204  }
205 
206  if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
207  avctx->width = w;
208  avctx->height = h;
209  }
210 
211  src_linesize[0] = av_image_get_linesize(avctx->pix_fmt, w, 0);
212  src_linesize[1] = av_image_get_linesize(avctx->pix_fmt, w, 1);
213  src_linesize[2] = av_image_get_linesize(avctx->pix_fmt, w, 2);
214 
215  src_data[0] = (uint8_t*)buffer->data();
216  src_data[1] = src_data[0] + src_linesize[0] * h;
217  src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
218  av_image_copy(frame->vframe->data, frame->vframe->linesize,
219  src_data, src_linesize,
220  avctx->pix_fmt, avctx->width, avctx->height);
221 
222  buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
223  if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
224  frame->vframe->pts = (*s->ts_map)[out_frame_index].pts;
225  frame->vframe->reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
226  s->ts_map->erase(out_frame_index);
227  }
228  buffer->release();
229  } else if (frame->status == INFO_FORMAT_CHANGED) {
230  if (buffer)
231  buffer->release();
232  av_free(frame);
233  continue;
234  } else {
235  decode_done = 1;
236  }
237 push_frame:
238  while (true) {
240  if (s->out_queue->size() >= 10) {
242  usleep(10000);
243  continue;
244  }
245  break;
246  }
247  s->out_queue->push_back(frame);
249  } while (!decode_done && !s->stop_decode);
250 
251  s->thread_exited = true;
252 
253  return 0;
254 }
255 
257 {
259  sp<MetaData> meta, outFormat;
260  int32_t colorFormat = 0;
261  int ret;
262 
263  if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
264  return -1;
265 
266  s->avctx = avctx;
267  s->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
268  if (!s->bsfc) {
269  av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
270  return -1;
271  }
272 
276  if (!s->orig_extradata) {
277  ret = AVERROR(ENOMEM);
278  goto fail;
279  }
280  memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);
281 
282  meta = new MetaData;
283  if (!meta) {
284  ret = AVERROR(ENOMEM);
285  goto fail;
286  }
287  meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
288  meta->setInt32(kKeyWidth, avctx->width);
289  meta->setInt32(kKeyHeight, avctx->height);
290  meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);
291 
292  android::ProcessState::self()->startThreadPool();
293 
294  s->source = new sp<MediaSource>();
295  *s->source = new CustomSource(avctx, meta);
296  s->in_queue = new List<Frame*>;
297  s->out_queue = new List<Frame*>;
298  s->ts_map = new std::map<int64_t, TimeStamp>;
299  s->client = new OMXClient;
300  s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
301  if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
302  !s->ts_map || !s->end_frame) {
303  ret = AVERROR(ENOMEM);
304  goto fail;
305  }
306 
307  if (s->client->connect() != OK) {
308  av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
309  ret = -1;
310  goto fail;
311  }
312 
313  s->decoder = new sp<MediaSource>();
314  *s->decoder = OMXCodec::Create(s->client->interface(), meta,
315  false, *s->source, NULL,
316  OMXCodec::kClientNeedsFramebuffer);
317  if ((*s->decoder)->start() != OK) {
318  av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
319  ret = -1;
320  s->client->disconnect();
321  goto fail;
322  }
323 
324  outFormat = (*s->decoder)->getFormat();
325  outFormat->findInt32(kKeyColorFormat, &colorFormat);
326  if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
327  colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
328  avctx->pix_fmt = AV_PIX_FMT_NV21;
329  else if (colorFormat == OMX_COLOR_FormatYCbYCr)
330  avctx->pix_fmt = AV_PIX_FMT_YUYV422;
331  else if (colorFormat == OMX_COLOR_FormatCbYCrY)
332  avctx->pix_fmt = AV_PIX_FMT_UYVY422;
333  else
334  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
335 
336  outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
337  if (s->decoder_component)
339 
343  return 0;
344 
345 fail:
348  av_freep(&s->end_frame);
349  delete s->in_queue;
350  delete s->out_queue;
351  delete s->ts_map;
352  delete s->client;
353  return ret;
354 }
355 
357  int *got_frame, AVPacket *avpkt)
358 {
360  Frame *frame;
361  status_t status;
362  int orig_size = avpkt->size;
363  AVPacket pkt = *avpkt;
364  AVFrame *ret_frame;
365 
366  if (!s->thread_started) {
368  return AVERROR(ENOMEM);
369  s->thread_started = true;
370  }
371 
372  if (avpkt && avpkt->data) {
374  avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
375  avpkt = &pkt;
376  }
377 
378  if (!s->source_done) {
379  if(!s->dummy_buf) {
380  s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
381  if (!s->dummy_buf)
382  return AVERROR(ENOMEM);
383  s->dummy_bufsize = avpkt->size;
384  memcpy(s->dummy_buf, avpkt->data, avpkt->size);
385  }
386 
387  frame = (Frame*)av_mallocz(sizeof(Frame));
388  if (avpkt->data) {
389  frame->status = OK;
390  frame->size = avpkt->size;
391  frame->key = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
392  frame->buffer = (uint8_t*)av_malloc(avpkt->size);
393  if (!frame->buffer) {
394  av_freep(&frame);
395  return AVERROR(ENOMEM);
396  }
397  uint8_t *ptr = avpkt->data;
398  // The OMX.SEC decoder fails without this.
399  if (avpkt->size == orig_size + avctx->extradata_size) {
400  ptr += avctx->extradata_size;
401  frame->size = orig_size;
402  }
403  memcpy(frame->buffer, ptr, orig_size);
404  if (avpkt == &pkt)
405  av_free(avpkt->data);
406 
407  frame->time = ++s->frame_index;
408  (*s->ts_map)[s->frame_index].pts = avpkt->pts;
409  (*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
410  } else {
411  frame->status = ERROR_END_OF_STREAM;
412  s->source_done = true;
413  }
414 
415  while (true) {
416  if (s->thread_exited) {
417  s->source_done = true;
418  break;
419  }
421  if (s->in_queue->size() >= 10) {
423  usleep(10000);
424  continue;
425  }
426  s->in_queue->push_back(frame);
429  break;
430  }
431  }
432  while (true) {
434  if (!s->out_queue->empty()) break;
436  if (s->source_done) {
437  usleep(10000);
438  continue;
439  } else {
440  return orig_size;
441  }
442  }
443 
444  frame = *s->out_queue->begin();
445  s->out_queue->erase(s->out_queue->begin());
447 
448  ret_frame = frame->vframe;
449  status = frame->status;
450  av_freep(&frame);
451 
452  if (status == ERROR_END_OF_STREAM)
453  return 0;
454  if (status != OK) {
455  if (status == AVERROR(ENOMEM))
456  return status;
457  av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
458  return -1;
459  }
460 
461  if (s->prev_frame)
463  s->prev_frame = ret_frame;
464 
465  *got_frame = 1;
466  *(AVFrame*)data = *ret_frame;
467  return orig_size;
468 }
469 
471 {
473  Frame *frame;
474 
475  if (s->thread_started) {
476  if (!s->thread_exited) {
477  s->stop_decode = 1;
478 
479  // Make sure decode_thread() doesn't get stuck
481  while (!s->out_queue->empty()) {
482  frame = *s->out_queue->begin();
483  s->out_queue->erase(s->out_queue->begin());
484  if (frame->vframe)
485  av_frame_free(&frame->vframe);
486  av_freep(&frame);
487  }
489 
490  // Feed a dummy frame prior to signalling EOF.
491  // This is required to terminate the decoder(OMX.SEC)
492  // when only one frame is read during stream info detection.
493  if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
494  frame->status = OK;
495  frame->size = s->dummy_bufsize;
496  frame->key = 1;
497  frame->buffer = s->dummy_buf;
499  s->in_queue->push_back(frame);
502  s->dummy_buf = NULL;
503  }
504 
506  s->end_frame->status = ERROR_END_OF_STREAM;
507  s->in_queue->push_back(s->end_frame);
510  s->end_frame = NULL;
511  }
512 
514 
515  if (s->prev_frame)
517 
518  s->thread_started = false;
519  }
520 
521  while (!s->in_queue->empty()) {
522  frame = *s->in_queue->begin();
523  s->in_queue->erase(s->in_queue->begin());
524  if (frame->size)
525  av_freep(&frame->buffer);
526  av_freep(&frame);
527  }
528 
529  while (!s->out_queue->empty()) {
530  frame = *s->out_queue->begin();
531  s->out_queue->erase(s->out_queue->begin());
532  if (frame->vframe)
533  av_frame_free(&frame->vframe);
534  av_freep(&frame);
535  }
536 
537  (*s->decoder)->stop();
538  s->client->disconnect();
539 
540  if (s->decoder_component)
542  av_freep(&s->dummy_buf);
543  av_freep(&s->end_frame);
544 
545  // Reset the extradata back to the original mp4 format, so that
546  // the next invocation (both when decoding and when called from
547  // av_find_stream_info) get the original mp4 format extradata.
548  av_freep(&avctx->extradata);
549  avctx->extradata = s->orig_extradata;
551 
552  delete s->in_queue;
553  delete s->out_queue;
554  delete s->ts_map;
555  delete s->client;
556  delete s->decoder;
557  delete s->source;
558 
563  return 0;
564 }
565 
567  "libstagefright_h264",
568  NULL_IF_CONFIG_SMALL("libstagefright H.264"),
572  NULL, //supported_framerates
573  NULL, //pix_fmts
574  NULL, //supported_samplerates
575  NULL, //sample_fmts
576  NULL, //channel_layouts
577  0, //max_lowres
578  NULL, //priv_class
579  NULL, //profiles
580  sizeof(StagefrightContext),
581  NULL, //next
582  NULL, //init_thread_copy
583  NULL, //update_thread_context
584  NULL, //defaults
585  NULL, //init_static_data
587  NULL, //encode
588  NULL, //encode2
591 };
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:83
#define NULL
Definition: coverity.c:32
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane...
Definition: imgutils.c:75
const char * s
Definition: avisynth_c.h:631
pthread_mutex_t in_mutex
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:94
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:2090
This structure describes decoded (raw) audio or video data.
Definition: frame.h:171
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
Definition: os2threads.h:153
ptrdiff_t const GLvoid * data
Definition: opengl_enc.c:101
sp< MediaSource > * decoder
sp< MetaData > source_meta
misc image utilities
virtual status_t start(MetaData *params)
int size
Definition: avcodec.h:1163
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:1444
virtual status_t stop()
static AVPacket pkt
uint8_t * buffer
AVCodec.
Definition: avcodec.h:3181
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
Definition: os2threads.h:124
pthread_cond_t condition
Definition: ffplay.c:148
HMTX pthread_mutex_t
Definition: os2threads.h:40
if()
Definition: avfilter.c:975
uint8_t
#define av_cold
Definition: attributes.h:74
#define av_malloc(s)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:135
#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:257
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
Definition: avcodec.h:1355
MediaBufferGroup buf_group
static AVFrame * frame
uint8_t * data
Definition: avcodec.h:1162
static int push_frame(AVFilterContext *ctx, unsigned in_no, AVFrame *buf)
Definition: avf_concat.c:162
static av_cold int Stagefright_init(AVCodecContext *avctx)
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
Definition: os2threads.h:131
CustomSource(AVCodecContext *avctx, sp< MetaData > meta)
const OptionDef options[]
Definition: ffserver.c:3798
#define av_log(a,...)
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: avcodec.h:1208
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
#define CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: avcodec.h:824
const char * decoder_component
#define AVERROR(e)
Definition: error.h:43
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:148
size_t size
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Definition: internal.h:175
void av_bitstream_filter_close(AVBitStreamFilterContext *bsf)
Release bitstream filter context.
void * decode_thread(void *arg)
const char * arg
Definition: jacosubdec.c:66
GLenum GLint * params
Definition: opengl_enc.c:114
static int Stagefright_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
status_t status
AVFrame * vframe
Libavcodec external API header.
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:288
int flags
A combination of AV_PKT_FLAG values.
Definition: avcodec.h:1168
#define FF_INPUT_BUFFER_PADDING_SIZE
Required number of additionally allocated bytes at the end of the input bitstream for decoding...
Definition: avcodec.h:630
as above, but U and V bytes are swapped
Definition: pixfmt.h:92
volatile sig_atomic_t thread_started
ret
Definition: avfilter.c:974
int width
picture width / height.
Definition: avcodec.h:1414
AVBitStreamFilterContext * av_bitstream_filter_init(const char *name)
Create and initialize a bitstream filter context given a bitstream filter name.
StagefrightContext * s
int32_t
static av_always_inline int pthread_join(pthread_t thread, void **value_ptr)
Definition: os2threads.h:80
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:87
AVBitStreamFilterContext * bsfc
int64_t reordered_opaque
opaque 64bit number (generally a PTS) that will be reordered and output in AVFrame.reordered_opaque
Definition: avcodec.h:2637
virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options)
volatile sig_atomic_t stop_decode
AVCodecContext * avctx
static av_always_inline int pthread_create(pthread_t *thread, const pthread_attr_t *attr, void *(*start_routine)(void *), void *arg)
Definition: os2threads.h:64
int av_bitstream_filter_filter(AVBitStreamFilterContext *bsfc, AVCodecContext *avctx, const char *args, uint8_t **poutbuf, int *poutbuf_size, const uint8_t *buf, int buf_size, int keyframe)
Filter bitstream.
int frame_size
Definition: mxfenc.c:1803
char * av_strdup(const char *s)
Duplicate the string s.
Definition: mem.c:265
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:199
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:69
main external API structure.
Definition: avcodec.h:1241
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: utils.c:1035
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:64
int extradata_size
Definition: avcodec.h:1356
int64_t reordered_opaque
reordered opaque 64bit (generally an integer or a double precision float PTS but can be anything)...
Definition: frame.h:399
pthread_mutex_t out_mutex
pthread_t decode_thread_id
int64_t time
static int64_t pts
Global timestamp for the audio frames.
List< Frame * > * out_queue
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:182
List< Frame * > * in_queue
std::map< int64_t, TimeStamp > * ts_map
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:63
common internal api header.
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
Definition: os2threads.h:115
AVCodec ff_libstagefright_h264_decoder
void * priv_data
Definition: avcodec.h:1283
virtual sp< MetaData > getFormat()
#define av_free(p)
int64_t reordered_opaque
static av_always_inline int pthread_mutex_unlock(pthread_mutex_t *mutex)
Definition: os2threads.h:108
static av_cold int Stagefright_close(AVCodecContext *avctx)
volatile sig_atomic_t thread_exited
#define av_freep(p)
static av_always_inline int pthread_mutex_lock(pthread_mutex_t *mutex)
Definition: os2threads.h:101
This structure stores compressed data.
Definition: avcodec.h:1139
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:969
void * av_mallocz(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:250
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: avcodec.h:1155
GLuint buffer
Definition: opengl_enc.c:102
sp< MediaSource > * source