FFmpeg
qsvdec.c
1 /*
2  * Intel MediaSDK QSV codec-independent code
3  *
4  * copyright (c) 2013 Luca Barbato
5  * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include <string.h>
25 #include <sys/types.h>
26 
27 #include <mfx/mfxvideo.h>
28 
29 #include "libavutil/common.h"
30 #include "libavutil/hwcontext.h"
31 #include "libavutil/hwcontext_qsv.h"
32 #include "libavutil/mem.h"
33 #include "libavutil/log.h"
34 #include "libavutil/pixdesc.h"
35 #include "libavutil/pixfmt.h"
36 #include "libavutil/time.h"
37 
38 #include "avcodec.h"
39 #include "internal.h"
40 #include "qsv.h"
41 #include "qsv_internal.h"
42 #include "qsvdec.h"
43 
44 const AVCodecHWConfigInternal *ff_qsv_hw_configs[] = {
45  &(const AVCodecHWConfigInternal) {
46  .public = {
47  .pix_fmt = AV_PIX_FMT_QSV,
48  .methods = AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX |
49  AV_CODEC_HW_CONFIG_METHOD_AD_HOC,
50  .device_type = AV_HWDEVICE_TYPE_QSV,
51  },
52  .hwaccel = NULL,
53  },
54  NULL
55 };
56 
57 static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
58  AVBufferRef *hw_frames_ref, AVBufferRef *hw_device_ref)
59 {
60  int ret;
61 
62  if (session) {
63  q->session = session;
64  } else if (hw_frames_ref) {
65  if (q->internal_session) {
66  MFXClose(q->internal_session);
67  q->internal_session = NULL;
68  }
69  av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
70 
71  q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
72  if (!q->frames_ctx.hw_frames_ctx)
73  return AVERROR(ENOMEM);
74 
75  ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
76  &q->frames_ctx, q->load_plugins,
77  q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
78  if (ret < 0) {
79  av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
80  return ret;
81  }
82 
83  q->session = q->internal_session;
84  } else if (hw_device_ref) {
85  if (q->internal_session) {
86  MFXClose(q->internal_session);
87  q->internal_session = NULL;
88  }
89 
90  ret = ff_qsv_init_session_device(avctx, &q->internal_session,
91  hw_device_ref, q->load_plugins);
92  if (ret < 0)
93  return ret;
94 
95  q->session = q->internal_session;
96  } else {
97  if (!q->internal_session) {
98  ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
99  q->load_plugins);
100  if (ret < 0)
101  return ret;
102  }
103 
104  q->session = q->internal_session;
105  }
106 
107  /* make sure the decoder is uninitialized */
108  MFXVideoDECODE_Close(q->session);
109 
110  return 0;
111 }
112 
113 static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
114 {
115  const AVPixFmtDescriptor *desc;
116  mfxSession session = NULL;
117  int iopattern = 0;
118  mfxVideoParam param = { 0 };
119  int frame_width = avctx->coded_width;
120  int frame_height = avctx->coded_height;
121  int ret;
122 
123  desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
124  if (!desc)
125  return AVERROR_BUG;
126 
127  if (!q->async_fifo) {
128  q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
129  (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
130  if (!q->async_fifo)
131  return AVERROR(ENOMEM);
132  }
133 
134  if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
135  AVQSVContext *user_ctx = avctx->hwaccel_context;
136  session = user_ctx->session;
137  iopattern = user_ctx->iopattern;
138  q->ext_buffers = user_ctx->ext_buffers;
139  q->nb_ext_buffers = user_ctx->nb_ext_buffers;
140  }
141 
142  if (avctx->hw_frames_ctx) {
143  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
144  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
145 
146  if (!iopattern) {
147  if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
148  iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
149  else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
150  iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
151  }
152 
153  frame_width = frames_hwctx->surfaces[0].Info.Width;
154  frame_height = frames_hwctx->surfaces[0].Info.Height;
155  }
156 
157  if (!iopattern)
158  iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
159  q->iopattern = iopattern;
160 
161  ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx, avctx->hw_device_ctx);
162  if (ret < 0) {
163  av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
164  return ret;
165  }
166 
167  ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
168  if (ret < 0)
169  return ret;
170 
171  param.mfx.CodecId = ret;
172  param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
173  param.mfx.CodecLevel = avctx->level == FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN : avctx->level;
174 
175  param.mfx.FrameInfo.BitDepthLuma = desc->comp[0].depth;
176  param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
177  param.mfx.FrameInfo.Shift = desc->comp[0].depth > 8;
178  param.mfx.FrameInfo.FourCC = q->fourcc;
179  param.mfx.FrameInfo.Width = frame_width;
180  param.mfx.FrameInfo.Height = frame_height;
181  param.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
182 
183  switch (avctx->field_order) {
184  case AV_FIELD_PROGRESSIVE:
185  param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
186  break;
187  case AV_FIELD_TT:
188  param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
189  break;
190  case AV_FIELD_BB:
191  param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
192  break;
193  default:
194  param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
195  break;
196  }
197 
198  param.IOPattern = q->iopattern;
199  param.AsyncDepth = q->async_depth;
200  param.ExtParam = q->ext_buffers;
201  param.NumExtParam = q->nb_ext_buffers;
202 
203  ret = MFXVideoDECODE_Init(q->session, &param);
204  if (ret < 0)
205  return ff_qsv_print_error(avctx, ret,
206  "Error initializing the MFX video decoder");
207 
208  q->frame_info = param.mfx.FrameInfo;
209 
210  return 0;
211 }
212 
213 static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
214 {
215  int ret;
216 
217  ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
218  if (ret < 0)
219  return ret;
220 
221  if (frame->frame->format == AV_PIX_FMT_QSV) {
222  frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
223  } else {
224  frame->surface.Info = q->frame_info;
225 
226  frame->surface.Data.PitchLow = frame->frame->linesize[0];
227  frame->surface.Data.Y = frame->frame->data[0];
228  frame->surface.Data.UV = frame->frame->data[1];
229  }
230 
231  if (q->frames_ctx.mids) {
232  ret = ff_qsv_find_surface_idx(&q->frames_ctx, frame);
233  if (ret < 0)
234  return ret;
235 
236  frame->surface.Data.MemId = &q->frames_ctx.mids[ret];
237  }
238 
239  frame->used = 1;
240 
241  return 0;
242 }
243 
244 static void qsv_clear_unused_frames(QSVContext *q)
245 {
246  QSVFrame *cur = q->work_frames;
247  while (cur) {
248  if (cur->used && !cur->surface.Data.Locked && !cur->queued) {
249  cur->used = 0;
250  av_frame_unref(cur->frame);
251  }
252  cur = cur->next;
253  }
254 }
255 
256 static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
257 {
258  QSVFrame *frame, **last;
259  int ret;
260 
261  qsv_clear_unused_frames(q);
262 
263  frame = q->work_frames;
264  last = &q->work_frames;
265  while (frame) {
266  if (!frame->used) {
267  ret = alloc_frame(avctx, q, frame);
268  if (ret < 0)
269  return ret;
270  *surf = &frame->surface;
271  return 0;
272  }
273 
274  last = &frame->next;
275  frame = frame->next;
276  }
277 
278  frame = av_mallocz(sizeof(*frame));
279  if (!frame)
280  return AVERROR(ENOMEM);
281  frame->frame = av_frame_alloc();
282  if (!frame->frame) {
283  av_freep(&frame);
284  return AVERROR(ENOMEM);
285  }
286  *last = frame;
287 
288  ret = alloc_frame(avctx, q, frame);
289  if (ret < 0)
290  return ret;
291 
292  *surf = &frame->surface;
293 
294  return 0;
295 }
296 
297 static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
298 {
299  QSVFrame *cur = q->work_frames;
300  while (cur) {
301  if (surf == &cur->surface)
302  return cur;
303  cur = cur->next;
304  }
305  return NULL;
306 }
307 
308 static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
309  AVFrame *frame, int *got_frame,
310  AVPacket *avpkt)
311 {
312  QSVFrame *out_frame;
313  mfxFrameSurface1 *insurf;
314  mfxFrameSurface1 *outsurf;
315  mfxSyncPoint *sync;
316  mfxBitstream bs = { { { 0 } } };
317  int ret;
318 
319  if (avpkt->size) {
320  bs.Data = avpkt->data;
321  bs.DataLength = avpkt->size;
322  bs.MaxLength = bs.DataLength;
323  bs.TimeStamp = avpkt->pts;
324  }
325 
326  sync = av_mallocz(sizeof(*sync));
327  if (!sync) {
328  av_freep(&sync);
329  return AVERROR(ENOMEM);
330  }
331 
332  do {
333  ret = get_surface(avctx, q, &insurf);
334  if (ret < 0) {
335  av_freep(&sync);
336  return ret;
337  }
338 
339  ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
340  insurf, &outsurf, sync);
341  if (ret == MFX_WRN_DEVICE_BUSY)
342  av_usleep(500);
343 
344  } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);
345 
346  if (ret != MFX_ERR_NONE &&
347  ret != MFX_ERR_MORE_DATA &&
348  ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
349  ret != MFX_ERR_MORE_SURFACE) {
350  av_freep(&sync);
351  return ff_qsv_print_error(avctx, ret,
352  "Error during QSV decoding.");
353  }
354 
355  /* make sure we do not enter an infinite loop if the SDK
356  * did not consume any data and did not return anything */
357  if (!*sync && !bs.DataOffset) {
358  bs.DataOffset = avpkt->size;
359  ++q->zero_consume_run;
360  if (q->zero_consume_run > 1)
361  ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data");
362  } else {
363  q->zero_consume_run = 0;
364  }
365 
366  if (*sync) {
367  QSVFrame *out_frame = find_frame(q, outsurf);
368 
369  if (!out_frame) {
370  av_log(avctx, AV_LOG_ERROR,
371  "The returned surface does not correspond to any frame\n");
372  av_freep(&sync);
373  return AVERROR_BUG;
374  }
375 
376  out_frame->queued = 1;
377  av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
378  av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
379  } else {
380  av_freep(&sync);
381  }
382 
383  if (!av_fifo_space(q->async_fifo) ||
384  (!avpkt->size && av_fifo_size(q->async_fifo))) {
385  AVFrame *src_frame;
386 
387  av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
388  av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
389  out_frame->queued = 0;
390 
391  if (avctx->pix_fmt != AV_PIX_FMT_QSV) {
392  do {
393  ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
394  } while (ret == MFX_WRN_IN_EXECUTION);
395  }
396 
397  av_freep(&sync);
398 
399  src_frame = out_frame->frame;
400 
401  ret = av_frame_ref(frame, src_frame);
402  if (ret < 0)
403  return ret;
404 
405  outsurf = &out_frame->surface;
406 
407 #if FF_API_PKT_PTS
408 FF_DISABLE_DEPRECATION_WARNINGS
409  frame->pkt_pts = outsurf->Data.TimeStamp;
410 FF_ENABLE_DEPRECATION_WARNINGS
411 #endif
412  frame->pts = outsurf->Data.TimeStamp;
413 
414  frame->repeat_pict =
415  outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
416  outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
417  outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
418  frame->top_field_first =
419  outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
420  frame->interlaced_frame =
421  !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);
422 
423  /* update the surface properties */
424  if (avctx->pix_fmt == AV_PIX_FMT_QSV)
425  ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info;
426 
427  *got_frame = 1;
428  }
429 
430  return bs.DataOffset;
431 }
432 
433 int ff_qsv_decode_close(QSVContext *q)
434 {
435  QSVFrame *cur = q->work_frames;
436 
437  if (q->session)
438  MFXVideoDECODE_Close(q->session);
439 
440  while (q->async_fifo && av_fifo_size(q->async_fifo)) {
441  QSVFrame *out_frame;
442  mfxSyncPoint *sync;
443 
444  av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
445  av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
446 
447  av_freep(&sync);
448  }
449 
450  while (cur) {
451  q->work_frames = cur->next;
452  av_frame_free(&cur->frame);
453  av_freep(&cur);
454  cur = q->work_frames;
455  }
456 
457  av_fifo_free(q->async_fifo);
458  q->async_fifo = NULL;
459 
460  av_parser_close(q->parser);
461  avcodec_free_context(&q->avctx_internal);
462 
463  if (q->internal_session)
464  MFXClose(q->internal_session);
465 
466  av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
467  av_buffer_unref(&q->frames_ctx.mids_buf);
468 
469  return 0;
470 }
471 
472 int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
473  AVFrame *frame, int *got_frame, AVPacket *pkt)
474 {
475  uint8_t *dummy_data;
476  int dummy_size;
477  int ret;
478 
479  if (!q->avctx_internal) {
480  q->avctx_internal = avcodec_alloc_context3(NULL);
481  if (!q->avctx_internal)
482  return AVERROR(ENOMEM);
483 
484  q->parser = av_parser_init(avctx->codec_id);
485  if (!q->parser)
486  return AVERROR(ENOMEM);
487 
488  q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
489  q->orig_pix_fmt = AV_PIX_FMT_NONE;
490  }
491 
492  if (!pkt->size)
493  return qsv_decode(avctx, q, frame, got_frame, pkt);
494 
495  /* we assume the packets are already split properly and want
496  * just the codec parameters here */
497  av_parser_parse2(q->parser, q->avctx_internal,
498  &dummy_data, &dummy_size,
499  pkt->data, pkt->size, pkt->pts, pkt->dts,
500  pkt->pos);
501 
502  /* TODO: flush delayed frames on reinit */
503  if (q->parser->format != q->orig_pix_fmt ||
504  q->parser->coded_width != avctx->coded_width ||
505  q->parser->coded_height != avctx->coded_height) {
506  enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
507  AV_PIX_FMT_NONE,
508  AV_PIX_FMT_NONE };
509  enum AVPixelFormat qsv_format;
510 
511  qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
512  if (qsv_format < 0) {
513  av_log(avctx, AV_LOG_ERROR,
514  "Decoding pixel format '%s' is not supported\n",
515  av_get_pix_fmt_name(q->parser->format));
516  ret = AVERROR(ENOSYS);
517  goto reinit_fail;
518  }
519 
520  q->orig_pix_fmt = q->parser->format;
521  avctx->pix_fmt = pix_fmts[1] = qsv_format;
522  avctx->width = q->parser->width;
523  avctx->height = q->parser->height;
524  avctx->coded_width = q->parser->coded_width;
525  avctx->coded_height = q->parser->coded_height;
526  avctx->field_order = q->parser->field_order;
527  avctx->level = q->avctx_internal->level;
528  avctx->profile = q->avctx_internal->profile;
529 
530  ret = ff_get_format(avctx, pix_fmts);
531  if (ret < 0)
532  goto reinit_fail;
533 
534  avctx->pix_fmt = ret;
535 
536  ret = qsv_decode_init(avctx, q);
537  if (ret < 0)
538  goto reinit_fail;
539  }
540 
541  return qsv_decode(avctx, q, frame, got_frame, pkt);
542 
543 reinit_fail:
544  q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
545  return ret;
546 }
547 
548 void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
549 {
550  q->orig_pix_fmt = AV_PIX_FMT_NONE;
551 }
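
The entry points above (ff_qsv_process_data(), ff_qsv_decode_flush(), ff_qsv_decode_close()) are internal libavcodec API and are driven by the codec-specific QSV decoder wrappers rather than called by library users. The sketch below is a minimal, hypothetical illustration of that call pattern, assuming it is built inside libavcodec; the ExampleQSVDecContext struct and example_* function names are invented for illustration, and the real wrappers additionally buffer packets before feeding them to ff_qsv_process_data().

#include "avcodec.h"
#include "qsvdec.h"

/* Hypothetical private context; a real wrapper embeds a QSVContext
 * alongside its own state. */
typedef struct ExampleQSVDecContext {
    AVClass *class;
    QSVContext qsv;
} ExampleQSVDecContext;

static int example_qsv_decode_frame(AVCodecContext *avctx, void *data,
                                    int *got_frame, AVPacket *avpkt)
{
    ExampleQSVDecContext *s = avctx->priv_data;
    AVFrame *frame = data;
    int ret;

    /* Parses the packet, (re)initializes the MFX decoder on stream
     * parameter changes and may output one decoded frame. */
    ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);
    if (ret < 0)
        return ret;

    return avpkt->size;
}

static void example_qsv_decode_flush(AVCodecContext *avctx)
{
    ExampleQSVDecContext *s = avctx->priv_data;

    ff_qsv_decode_flush(avctx, &s->qsv);
}

static int example_qsv_decode_close(AVCodecContext *avctx)
{
    ExampleQSVDecContext *s = avctx->priv_data;

    return ff_qsv_decode_close(&s->qsv);
}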