FFmpeg
crystalhd.c
1 /*
2  * - CrystalHD decoder module -
3  *
4  * Copyright(C) 2010,2011 Philip Langdale <ffmpeg.philipl@overt.org>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /*
24  * - Principles of Operation -
25  *
26  * The CrystalHD decoder operates at the bitstream level - which is an even
27  * higher level than the decoding hardware you typically see in modern GPUs.
28  * This means it has a very simple interface, in principle. You feed demuxed
29  * packets in one end and get decoded picture (fields/frames) out the other.
30  *
31  * Of course, nothing is ever that simple. Due, at the very least, to b-frame
32  * dependencies in the supported formats, the hardware has a delay between
33  * when a packet goes in, and when a picture comes out. Furthermore, this delay
34  * is not just a function of time, but also one of the dependency on additional
35  * frames being fed into the decoder to satisfy the b-frame dependencies.
36  *
37  * As such, the hardware can only be used effectively with a decode API that
38  * doesn't assume a 1:1 relationship between input packets and output frames.
39  * The new avcodec decode API is such an API (an m:n API) while the old one is
40  * 1:1. Consequently, we no longer support the old API, which allows us to avoid
41  * the vicious hacks that are required to approximate 1:1 operation.
42  */
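/*
 * Editorial sketch (not part of the upstream file): the caller-side shape of
 * the m:n decode loop used with this decoder through the new API, assuming
 * the usual libavcodec/libavutil headers are available. The
 * avcodec_send_packet()/avcodec_receive_frame() calls are the public
 * libavcodec entry points; the helper name and the reduced error handling
 * are illustrative only.
 */
static int decode_all_example(AVCodecContext *avctx, AVPacket *pkt, AVFrame *frame)
{
    /* pkt == NULL enters draining mode so buffered pictures are flushed out. */
    int ret = avcodec_send_packet(avctx, pkt);
    if (ret < 0)
        return ret;

    while (ret >= 0) {
        ret = avcodec_receive_frame(avctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;      /* more input needed, or fully drained */
        if (ret < 0)
            return ret;    /* decoding error */

        /* ... consume the decoded frame here ... */
        av_frame_unref(frame);
    }
    return 0;
}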
43 
44 /*****************************************************************************
45  * Includes
46  ****************************************************************************/
47 
48 #define _XOPEN_SOURCE 600
49 #include <inttypes.h>
50 #include <stdio.h>
51 #include <stdlib.h>
52 
53 #include <libcrystalhd/bc_dts_types.h>
54 #include <libcrystalhd/bc_dts_defs.h>
55 #include <libcrystalhd/libcrystalhd_if.h>
56 
57 #include "avcodec.h"
58 #include "decode.h"
59 #include "internal.h"
60 #include "libavutil/imgutils.h"
61 #include "libavutil/intreadwrite.h"
62 #include "libavutil/opt.h"
63 
64 #if HAVE_UNISTD_H
65 #include <unistd.h>
66 #endif
67 
68 /** Timeout parameter passed to DtsProcOutput() in us */
69 #define OUTPUT_PROC_TIMEOUT 50
70 /** Step between fake timestamps passed to hardware in units of 100ns */
71 #define TIMESTAMP_UNIT 100000
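/*
 * Editorial note: the hardware counts in 100 ns units, so TIMESTAMP_UNIT
 * puts consecutive fake timestamps 100000 * 100 ns = 10 ms apart.
 */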
72 
73 
74 /*****************************************************************************
75  * Module private data
76  ****************************************************************************/
77 
78 typedef enum {
79  RET_ERROR = -1,
80  RET_OK = 0,
81  RET_COPY_AGAIN = 1,
82 } CopyRet;
83 
84 typedef struct OpaqueList {
85  struct OpaqueList *next;
86  uint64_t fake_timestamp;
87  uint64_t reordered_opaque;
88 } OpaqueList;
89 
90 typedef struct {
91  AVClass *av_class;
92  AVCodecContext *avctx;
93  HANDLE dev;
94 
95  uint8_t is_70012;
96  uint8_t need_second_field;
97  uint8_t draining;
98 
99  OpaqueList *head;
100  OpaqueList *tail;
101 
102  /* Options */
103  uint32_t sWidth;
104 } CHDContext;
105 
106 static const AVOption options[] = {
107  { "crystalhd_downscale_width",
108  "Turn on downscaling to the specified width",
109  offsetof(CHDContext, sWidth),
110  AV_OPT_TYPE_INT, {.i64 = 0}, 0, UINT32_MAX,
111  AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM, },
112  { NULL, },
113 };
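/*
 * Editorial usage sketch (not part of the upstream file): being a
 * decoder-private AVOption flagged for decoding, the option above can be set
 * from the ffmpeg command line, e.g.
 *   ffmpeg -c:v h264_crystalhd -crystalhd_downscale_width 1280 -i input.mkv ...
 * or programmatically through an options dictionary when opening the decoder.
 * The function and variable names below are illustrative only.
 */
static int open_with_downscale_example(AVCodecContext *avctx, const AVCodec *codec)
{
    AVDictionary *opts = NULL;
    int ret;

    av_dict_set(&opts, "crystalhd_downscale_width", "1280", 0);
    ret = avcodec_open2(avctx, codec, &opts);
    av_dict_free(&opts);   /* options consumed by the decoder are removed from the dict */
    return ret;
}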
114 
115 
116 /*****************************************************************************
117  * Helper functions
118  ****************************************************************************/
119 
120 static inline BC_MEDIA_SUBTYPE id2subtype(CHDContext *priv, enum AVCodecID id)
121 {
122  switch (id) {
123  case AV_CODEC_ID_MPEG4:
124  return BC_MSUBTYPE_DIVX;
125  case AV_CODEC_ID_MSMPEG4V3:
126  return BC_MSUBTYPE_DIVX311;
127  case AV_CODEC_ID_MPEG2VIDEO:
128  return BC_MSUBTYPE_MPEG2VIDEO;
129  case AV_CODEC_ID_VC1:
130  return BC_MSUBTYPE_VC1;
131  case AV_CODEC_ID_WMV3:
132  return BC_MSUBTYPE_WMV3;
133  case AV_CODEC_ID_H264:
134  return BC_MSUBTYPE_H264;
135  default:
136  return BC_MSUBTYPE_INVALID;
137  }
138 }
139 
140 static inline void print_frame_info(CHDContext *priv, BC_DTS_PROC_OUT *output)
141 {
142  av_log(priv->avctx, AV_LOG_TRACE, "\tYBuffSz: %u\n", output->YbuffSz);
143  av_log(priv->avctx, AV_LOG_TRACE, "\tYBuffDoneSz: %u\n",
144  output->YBuffDoneSz);
145  av_log(priv->avctx, AV_LOG_TRACE, "\tUVBuffDoneSz: %u\n",
146  output->UVBuffDoneSz);
147  av_log(priv->avctx, AV_LOG_TRACE, "\tTimestamp: %"PRIu64"\n",
148  output->PicInfo.timeStamp);
149  av_log(priv->avctx, AV_LOG_TRACE, "\tPicture Number: %u\n",
150  output->PicInfo.picture_number);
151  av_log(priv->avctx, AV_LOG_TRACE, "\tWidth: %u\n",
152  output->PicInfo.width);
153  av_log(priv->avctx, AV_LOG_TRACE, "\tHeight: %u\n",
154  output->PicInfo.height);
155  av_log(priv->avctx, AV_LOG_TRACE, "\tChroma: 0x%03x\n",
156  output->PicInfo.chroma_format);
157  av_log(priv->avctx, AV_LOG_TRACE, "\tPulldown: %u\n",
158  output->PicInfo.pulldown);
159  av_log(priv->avctx, AV_LOG_TRACE, "\tFlags: 0x%08x\n",
160  output->PicInfo.flags);
161  av_log(priv->avctx, AV_LOG_TRACE, "\tFrame Rate/Res: %u\n",
162  output->PicInfo.frame_rate);
163  av_log(priv->avctx, AV_LOG_TRACE, "\tAspect Ratio: %u\n",
164  output->PicInfo.aspect_ratio);
165  av_log(priv->avctx, AV_LOG_TRACE, "\tColor Primaries: %u\n",
166  output->PicInfo.colour_primaries);
167  av_log(priv->avctx, AV_LOG_TRACE, "\tMetaData: %u\n",
168  output->PicInfo.picture_meta_payload);
169  av_log(priv->avctx, AV_LOG_TRACE, "\tSession Number: %u\n",
170  output->PicInfo.sess_num);
171  av_log(priv->avctx, AV_LOG_TRACE, "\tycom: %u\n",
172  output->PicInfo.ycom);
173  av_log(priv->avctx, AV_LOG_TRACE, "\tCustom Aspect: %u\n",
174  output->PicInfo.custom_aspect_ratio_width_height);
175  av_log(priv->avctx, AV_LOG_TRACE, "\tFrames to Drop: %u\n",
176  output->PicInfo.n_drop);
177  av_log(priv->avctx, AV_LOG_TRACE, "\tH264 Valid Fields: 0x%08x\n",
178  output->PicInfo.other.h264.valid);
179 }
180 
181 
182 /*****************************************************************************
183  * OpaqueList functions
184  ****************************************************************************/
185 
186 static uint64_t opaque_list_push(CHDContext *priv, uint64_t reordered_opaque)
187 {
188  OpaqueList *newNode = av_mallocz(sizeof (OpaqueList));
189  if (!newNode) {
190  av_log(priv->avctx, AV_LOG_ERROR,
191  "Unable to allocate new node in OpaqueList.\n");
192  return 0;
193  }
194  if (!priv->head) {
195  newNode->fake_timestamp = TIMESTAMP_UNIT;
196  priv->head = newNode;
197  } else {
198  newNode->fake_timestamp = priv->tail->fake_timestamp + TIMESTAMP_UNIT;
199  priv->tail->next = newNode;
200  }
201  priv->tail = newNode;
202  newNode->reordered_opaque = reordered_opaque;
203 
204  return newNode->fake_timestamp;
205 }
206 
207 /*
208  * The OpaqueList is built in decode order, while elements will be removed
209  * in presentation order. If frames are reordered, this means we must be
210  * able to remove elements that are not the first element.
211  *
212  * Returned node must be freed by caller.
213  */
214 static OpaqueList *opaque_list_pop(CHDContext *priv, uint64_t fake_timestamp)
215 {
216  OpaqueList *node = priv->head;
217 
218  if (!priv->head) {
219  av_log(priv->avctx, AV_LOG_ERROR,
220  "CrystalHD: Attempted to query non-existent timestamps.\n");
221  return NULL;
222  }
223 
224  /*
225  * The first element is special-cased because we have to manipulate
226  * the head pointer rather than the previous element in the list.
227  */
228  if (priv->head->fake_timestamp == fake_timestamp) {
229  priv->head = node->next;
230 
231  if (!priv->head || !priv->head->next)
232  priv->tail = priv->head;
233 
234  node->next = NULL;
235  return node;
236  }
237 
238  /*
239  * The list is processed at arm's length so that we have the
240  * previous element available to rewrite its next pointer.
241  */
242  while (node->next) {
243  OpaqueList *current = node->next;
244  if (current->fake_timestamp == fake_timestamp) {
245  node->next = current->next;
246 
247  if (!node->next)
248  priv->tail = node;
249 
250  current->next = NULL;
251  return current;
252  } else {
253  node = current;
254  }
255  }
256 
257  av_log(priv->avctx, AV_LOG_VERBOSE,
258  "CrystalHD: Couldn't match fake_timestamp.\n");
259  return NULL;
260 }
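/*
 * Editorial illustration (not part of the upstream file) of the intended
 * round trip: push maps the caller's opaque pts to a monotonically increasing
 * fake timestamp that is handed to the hardware, and pop recovers the
 * original value when the matching picture comes back out. The function name
 * is hypothetical; the ownership rule is the one stated above.
 */
static void opaque_list_roundtrip_example(CHDContext *priv, int64_t pts)
{
    uint64_t fake_ts = opaque_list_push(priv, pts);   /* 0 signals ENOMEM */
    OpaqueList *node;

    if (!fake_ts)
        return;

    /* ... fake_ts travels through the hardware and returns in PicInfo ... */

    node = opaque_list_pop(priv, fake_ts);
    if (node) {
        int64_t recovered = node->reordered_opaque;   /* == pts */
        av_free(node);                                /* caller frees the node */
        (void)recovered;
    }
}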
261 
262 
263 /*****************************************************************************
264  * Video decoder API function definitions
265  ****************************************************************************/
266 
267 static void flush(AVCodecContext *avctx)
268 {
269  CHDContext *priv = avctx->priv_data;
270 
271  priv->need_second_field = 0;
272  priv->draining = 0;
273 
274  /* Flush mode 4 flushes all software and hardware buffers. */
275  DtsFlushInput(priv->dev, 4);
276 }
277 
278 
279 static av_cold int uninit(AVCodecContext *avctx)
280 {
281  CHDContext *priv = avctx->priv_data;
282  HANDLE device;
283 
284  device = priv->dev;
285  DtsStopDecoder(device);
286  DtsCloseDecoder(device);
287  DtsDeviceClose(device);
288 
289  if (priv->head) {
290  OpaqueList *node = priv->head;
291  while (node) {
292  OpaqueList *next = node->next;
293  av_free(node);
294  node = next;
295  }
296  }
297 
298  return 0;
299 }
300 
301 static av_cold int init(AVCodecContext *avctx)
302 {
303  CHDContext* priv;
304  BC_STATUS ret;
305  BC_INFO_CRYSTAL version;
306  BC_INPUT_FORMAT format = {
307  .FGTEnable = FALSE,
308  .Progressive = TRUE,
309  .OptFlags = 0x80000000 | vdecFrameRate59_94 | 0x40,
310  .width = avctx->width,
311  .height = avctx->height,
312  };
313 
314  BC_MEDIA_SUBTYPE subtype;
315 
316  uint32_t mode = DTS_PLAYBACK_MODE |
317  DTS_LOAD_FILE_PLAY_FW |
318  DTS_SKIP_TX_CHK_CPB |
319  DTS_PLAYBACK_DROP_RPT_MODE |
320  DTS_SINGLE_THREADED_MODE |
321  DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);
322 
323  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
324  avctx->codec->name);
325 
326  avctx->pix_fmt = AV_PIX_FMT_YUYV422;
327 
328  /* Initialize the library */
329  priv = avctx->priv_data;
330  priv->avctx = avctx;
331  priv->draining = 0;
332 
333  subtype = id2subtype(priv, avctx->codec->id);
334  switch (subtype) {
335  case BC_MSUBTYPE_H264:
336  format.startCodeSz = 4;
337  // Fall-through
338  case BC_MSUBTYPE_VC1:
339  case BC_MSUBTYPE_WVC1:
340  case BC_MSUBTYPE_WMV3:
341  case BC_MSUBTYPE_WMVA:
342  case BC_MSUBTYPE_MPEG2VIDEO:
343  case BC_MSUBTYPE_DIVX:
344  case BC_MSUBTYPE_DIVX311:
345  format.pMetaData = avctx->extradata;
346  format.metaDataSz = avctx->extradata_size;
347  break;
348  default:
349  av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
350  return AVERROR(EINVAL);
351  }
352  format.mSubtype = subtype;
353 
354  if (priv->sWidth) {
355  format.bEnableScaling = 1;
356  format.ScalingParams.sWidth = priv->sWidth;
357  }
358 
359  /* Get a decoder instance */
360  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
361  // Initialize the Link and Decoder devices
362  ret = DtsDeviceOpen(&priv->dev, mode);
363  if (ret != BC_STS_SUCCESS) {
364  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
365  goto fail;
366  }
367 
368  ret = DtsCrystalHDVersion(priv->dev, &version);
369  if (ret != BC_STS_SUCCESS) {
370  av_log(avctx, AV_LOG_VERBOSE,
371  "CrystalHD: DtsCrystalHDVersion failed\n");
372  goto fail;
373  }
374  priv->is_70012 = version.device == 0;
375 
376  if (priv->is_70012 &&
377  (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311)) {
378  av_log(avctx, AV_LOG_VERBOSE,
379  "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
380  goto fail;
381  }
382 
383  ret = DtsSetInputFormat(priv->dev, &format);
384  if (ret != BC_STS_SUCCESS) {
385  av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
386  goto fail;
387  }
388 
389  ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
390  if (ret != BC_STS_SUCCESS) {
391  av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
392  goto fail;
393  }
394 
395  ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
396  if (ret != BC_STS_SUCCESS) {
397  av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
398  goto fail;
399  }
400  ret = DtsStartDecoder(priv->dev);
401  if (ret != BC_STS_SUCCESS) {
402  av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
403  goto fail;
404  }
405  ret = DtsStartCapture(priv->dev);
406  if (ret != BC_STS_SUCCESS) {
407  av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
408  goto fail;
409  }
410 
411  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");
412 
413  return 0;
414 
415  fail:
416  uninit(avctx);
417  return -1;
418 }
419 
420 
421 static inline CopyRet copy_frame(AVCodecContext *avctx,
422  BC_DTS_PROC_OUT *output,
423  AVFrame *frame, int *got_frame)
424 {
425  BC_STATUS ret;
426  BC_DTS_STATUS decoder_status = { 0, };
427  uint8_t interlaced;
428 
429  CHDContext *priv = avctx->priv_data;
430  int64_t pkt_pts = AV_NOPTS_VALUE;
431 
432  uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
433  VDEC_FLAG_BOTTOMFIELD;
434  uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);
435 
436  int width = output->PicInfo.width;
437  int height = output->PicInfo.height;
438  int bwidth;
439  uint8_t *src = output->Ybuff;
440  int sStride;
441  uint8_t *dst;
442  int dStride;
443 
444  if (output->PicInfo.timeStamp != 0) {
445  OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
446  if (node) {
447  pkt_pts = node->reordered_opaque;
448  av_free(node);
449  } else {
450  /*
451  * We will encounter a situation where a timestamp cannot be
452  * popped if a second field is being returned. In this case,
453  * each field has the same timestamp and the first one will
454  * cause it to be popped. We'll avoid overwriting the valid
455  * timestamp below.
456  */
457  }
458  av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
459  output->PicInfo.timeStamp);
460  }
461 
462  ret = DtsGetDriverStatus(priv->dev, &decoder_status);
463  if (ret != BC_STS_SUCCESS) {
464  av_log(avctx, AV_LOG_ERROR,
465  "CrystalHD: GetDriverStatus failed: %u\n", ret);
466  return RET_ERROR;
467  }
468 
469  interlaced = output->PicInfo.flags & VDEC_FLAG_INTERLACED_SRC;
470 
471  av_log(avctx, AV_LOG_VERBOSE, "Interlaced state: %d\n",
472  interlaced);
473 
474  priv->need_second_field = interlaced && !priv->need_second_field;
475 
476  if (!frame->data[0]) {
477  if (ff_get_buffer(avctx, frame, 0) < 0)
478  return RET_ERROR;
479  }
480 
481  bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
482  if (bwidth < 0)
483  return RET_ERROR;
484 
485  if (priv->is_70012) {
486  int pStride;
487 
488  if (width <= 720)
489  pStride = 720;
490  else if (width <= 1280)
491  pStride = 1280;
492  else pStride = 1920;
493  sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
494  if (sStride < 0)
495  return RET_ERROR;
496  } else {
497  sStride = bwidth;
498  }
499 
500  dStride = frame->linesize[0];
501  dst = frame->data[0];
502 
503  av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");
504 
505  /*
506  * The hardware doesn't return the first sample of a picture.
507  * Ignoring why it behaves this way, it's better to copy the sample from
508  * the second line, rather than the next sample across because the chroma
509  * values should be correct (assuming the decoded video was 4:2:0, which
510  * it was).
511  */
512  *((uint32_t *)src) = *((uint32_t *)(src + sStride));
513 
514  if (interlaced) {
515  int dY = 0;
516  int sY = 0;
517 
518  height /= 2;
519  if (bottom_field) {
520  av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
521  dY = 1;
522  } else {
523  av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
524  dY = 0;
525  }
526 
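    /*
     * Each field provides height/2 consecutive source lines; they are copied
     * to every other line of the full-height destination, starting at line 0
     * for the top field or line 1 for the bottom field (hence dY advancing by
     * two per copied line).
     */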
527  for (sY = 0; sY < height; dY++, sY++) {
528  memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
529  dY++;
530  }
531  } else {
532  av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
533  }
534 
535  frame->interlaced_frame = interlaced;
536  if (interlaced)
537  frame->top_field_first = !bottom_first;
538 
539  frame->pts = pkt_pts;
540 #if FF_API_PKT_PTS
541 FF_DISABLE_DEPRECATION_WARNINGS
542  frame->pkt_pts = pkt_pts;
543 FF_ENABLE_DEPRECATION_WARNINGS
544 #endif
545 
546  frame->pkt_pos = -1;
547  frame->pkt_duration = 0;
548  frame->pkt_size = -1;
549 
550  if (!priv->need_second_field) {
551  *got_frame = 1;
552  } else {
553  return RET_COPY_AGAIN;
554  }
555 
556  return RET_OK;
557 }
558 
559 
560 static inline CopyRet receive_frame(AVCodecContext *avctx,
561  AVFrame *frame, int *got_frame)
562 {
563  BC_STATUS ret;
564  BC_DTS_PROC_OUT output = {
565  .PicInfo.width = avctx->width,
566  .PicInfo.height = avctx->height,
567  };
568  CHDContext *priv = avctx->priv_data;
569  HANDLE dev = priv->dev;
570 
571  *got_frame = 0;
572 
573  // Request decoded data from the driver
574  ret = DtsProcOutputNoCopy(dev, OUTPUT_PROC_TIMEOUT, &output);
575  if (ret == BC_STS_FMT_CHANGE) {
576  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Initial format change\n");
577  avctx->width = output.PicInfo.width;
578  avctx->height = output.PicInfo.height;
579  switch ( output.PicInfo.aspect_ratio ) {
580  case vdecAspectRatioSquare:
581  avctx->sample_aspect_ratio = (AVRational) { 1, 1};
582  break;
583  case vdecAspectRatio12_11:
584  avctx->sample_aspect_ratio = (AVRational) { 12, 11};
585  break;
586  case vdecAspectRatio10_11:
587  avctx->sample_aspect_ratio = (AVRational) { 10, 11};
588  break;
589  case vdecAspectRatio16_11:
590  avctx->sample_aspect_ratio = (AVRational) { 16, 11};
591  break;
592  case vdecAspectRatio40_33:
593  avctx->sample_aspect_ratio = (AVRational) { 40, 33};
594  break;
595  case vdecAspectRatio24_11:
596  avctx->sample_aspect_ratio = (AVRational) { 24, 11};
597  break;
598  case vdecAspectRatio20_11:
599  avctx->sample_aspect_ratio = (AVRational) { 20, 11};
600  break;
601  case vdecAspectRatio32_11:
602  avctx->sample_aspect_ratio = (AVRational) { 32, 11};
603  break;
604  case vdecAspectRatio80_33:
605  avctx->sample_aspect_ratio = (AVRational) { 80, 33};
606  break;
607  case vdecAspectRatio18_11:
608  avctx->sample_aspect_ratio = (AVRational) { 18, 11};
609  break;
610  case vdecAspectRatio15_11:
611  avctx->sample_aspect_ratio = (AVRational) { 15, 11};
612  break;
613  case vdecAspectRatio64_33:
614  avctx->sample_aspect_ratio = (AVRational) { 64, 33};
615  break;
616  case vdecAspectRatio160_99:
617  avctx->sample_aspect_ratio = (AVRational) {160, 99};
618  break;
619  case vdecAspectRatio4_3:
620  avctx->sample_aspect_ratio = (AVRational) { 4, 3};
621  break;
622  case vdecAspectRatio16_9:
623  avctx->sample_aspect_ratio = (AVRational) { 16, 9};
624  break;
625  case vdecAspectRatio221_1:
626  avctx->sample_aspect_ratio = (AVRational) {221, 1};
627  break;
628  }
629  return RET_COPY_AGAIN;
630  } else if (ret == BC_STS_SUCCESS) {
631  int copy_ret = -1;
632  if (output.PoutFlags & BC_POUT_FLAGS_PIB_VALID) {
633  print_frame_info(priv, &output);
634 
635  copy_ret = copy_frame(avctx, &output, frame, got_frame);
636  } else {
637  /*
638  * An invalid frame has been consumed.
639  */
640  av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput succeeded with "
641  "invalid PIB\n");
642  copy_ret = RET_COPY_AGAIN;
643  }
644  DtsReleaseOutputBuffs(dev, NULL, FALSE);
645 
646  return copy_ret;
647  } else if (ret == BC_STS_BUSY) {
648  return RET_COPY_AGAIN;
649  } else {
650  av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput failed %d\n", ret);
651  return RET_ERROR;
652  }
653 }
654 
655 static int crystalhd_decode_packet(AVCodecContext *avctx, const AVPacket *avpkt)
656 {
657  BC_STATUS bc_ret;
658  CHDContext *priv = avctx->priv_data;
659  HANDLE dev = priv->dev;
660  AVPacket filtered_packet = { 0 };
661  int ret = 0;
662 
663  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: decode_packet\n");
664 
665  if (avpkt && avpkt->size) {
666  uint64_t pts;
667 
668  /*
669  * Despite being notionally opaque, either libcrystalhd or
670  * the hardware itself will mangle pts values that are too
671  * small or too large. The docs claim it should be in units
672  * of 100ns. Given that we're nominally dealing with a black
673  * box on both sides, any transform we do has no guarantee of
674  * avoiding mangling so we need to build a mapping to values
675  * we know will not be mangled.
676  */
677  pts = opaque_list_push(priv, avpkt->pts);
678  if (!pts) {
679  ret = AVERROR(ENOMEM);
680  goto exit;
681  }
682  av_log(priv->avctx, AV_LOG_VERBOSE,
683  "input \"pts\": %"PRIu64"\n", pts);
684  bc_ret = DtsProcInput(dev, avpkt->data, avpkt->size, pts, 0);
685  if (bc_ret == BC_STS_BUSY) {
686  av_log(avctx, AV_LOG_WARNING,
687  "CrystalHD: ProcInput returned busy\n");
688  ret = AVERROR(EAGAIN);
689  goto exit;
690  } else if (bc_ret != BC_STS_SUCCESS) {
691  av_log(avctx, AV_LOG_ERROR,
692  "CrystalHD: ProcInput failed: %u\n", bc_ret);
693  ret = -1;
694  goto exit;
695  }
696  } else {
697  av_log(avctx, AV_LOG_INFO, "CrystalHD: No more input data\n");
698  priv->draining = 1;
699  ret = AVERROR_EOF;
700  goto exit;
701  }
702  exit:
703  av_packet_unref(&filtered_packet);
704  return ret;
705 }
706 
707 static int crystalhd_receive_frame(AVCodecContext *avctx, AVFrame *frame)
708 {
709  BC_STATUS bc_ret;
710  BC_DTS_STATUS decoder_status = { 0, };
711  CopyRet rec_ret;
712  CHDContext *priv = avctx->priv_data;
713  HANDLE dev = priv->dev;
714  int got_frame = 0;
715  int ret = 0;
716  AVPacket pkt = {0};
717 
718  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: receive_frame\n");
719 
720  ret = ff_decode_get_packet(avctx, &pkt);
721  if (ret < 0 && ret != AVERROR_EOF) {
722  return ret;
723  }
724 
725  while (pkt.size > DtsTxFreeSize(dev)) {
726  /*
727  * Block until there is space in the buffer for the next packet.
728  * We assume that the hardware will make forward progress at this
729  * point, although in pathological cases that may not happen.
730  */
731  av_log(avctx, AV_LOG_TRACE, "CrystalHD: Waiting for space in input buffer\n");
732  }
733 
734  ret = crystalhd_decode_packet(avctx, &pkt);
735  av_packet_unref(&pkt);
736  // crystalhd_is_buffer_full() should avoid this.
737  if (ret == AVERROR(EAGAIN)) {
738  ret = AVERROR_EXTERNAL;
739  }
740  if (ret < 0 && ret != AVERROR_EOF) {
741  return ret;
742  }
743 
744  do {
745  bc_ret = DtsGetDriverStatus(dev, &decoder_status);
746  if (bc_ret != BC_STS_SUCCESS) {
747  av_log(avctx, AV_LOG_ERROR, "CrystalHD: GetDriverStatus failed\n");
748  return -1;
749  }
750 
751  if (decoder_status.ReadyListCount == 0) {
752  av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Insufficient frames ready. Returning\n");
753  got_frame = 0;
754  rec_ret = RET_OK;
755  break;
756  }
757 
758  rec_ret = receive_frame(avctx, frame, &got_frame);
759  } while (rec_ret == RET_COPY_AGAIN);
760 
761  if (rec_ret == RET_ERROR) {
762  return -1;
763  } else if (got_frame == 0) {
764  return priv->draining ? AVERROR_EOF : AVERROR(EAGAIN);
765  } else {
766  return 0;
767  }
768 }
769 
770 #define DEFINE_CRYSTALHD_DECODER(x, X, bsf_name) \
771  static const AVClass x##_crystalhd_class = { \
772  .class_name = #x "_crystalhd", \
773  .item_name = av_default_item_name, \
774  .option = options, \
775  .version = LIBAVUTIL_VERSION_INT, \
776  }; \
777  AVCodec ff_##x##_crystalhd_decoder = { \
778  .name = #x "_crystalhd", \
779  .long_name = NULL_IF_CONFIG_SMALL("CrystalHD " #X " decoder"), \
780  .type = AVMEDIA_TYPE_VIDEO, \
781  .id = AV_CODEC_ID_##X, \
782  .priv_data_size = sizeof(CHDContext), \
783  .priv_class = &x##_crystalhd_class, \
784  .init = init, \
785  .close = uninit, \
786  .receive_frame = crystalhd_receive_frame, \
787  .flush = flush, \
788  .bsfs = bsf_name, \
789  .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AVOID_PROBING | AV_CODEC_CAP_HARDWARE, \
790  .pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE}, \
791  .wrapper_name = "crystalhd", \
792  };
793 
794 #if CONFIG_H264_CRYSTALHD_DECODER
795 DEFINE_CRYSTALHD_DECODER(h264, H264, "h264_mp4toannexb")
796 #endif
797 
798 #if CONFIG_MPEG2_CRYSTALHD_DECODER
799 DEFINE_CRYSTALHD_DECODER(mpeg2, MPEG2VIDEO, NULL)
800 #endif
801 
802 #if CONFIG_MPEG4_CRYSTALHD_DECODER
803 DEFINE_CRYSTALHD_DECODER(mpeg4, MPEG4, "mpeg4_unpack_bframes")
804 #endif
805 
806 #if CONFIG_MSMPEG4_CRYSTALHD_DECODER
807 DEFINE_CRYSTALHD_DECODER(msmpeg4, MSMPEG4V3, NULL)
808 #endif
809 
810 #if CONFIG_VC1_CRYSTALHD_DECODER
811 DEFINE_CRYSTALHD_DECODER(vc1, VC1, NULL)
812 #endif
813 
814 #if CONFIG_WMV3_CRYSTALHD_DECODER
815 DEFINE_CRYSTALHD_DECODER(wmv3, WMV3, NULL)
816 #endif