vc1dec.c
1 /*
2  * VC-1 and WMV3 decoder
3  * Copyright (c) 2011 Mashiat Sarker Shakkhar
4  * Copyright (c) 2006-2007 Konstantin Shishkov
5  * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 /**
25  * @file
26  * VC-1 and WMV3 decoder
27  */
28 
29 #include "avcodec.h"
30 #include "blockdsp.h"
31 #include "get_bits.h"
32 #include "internal.h"
33 #include "mpeg_er.h"
34 #include "mpegvideo.h"
35 #include "msmpeg4data.h"
36 #include "vc1.h"
37 #include "vc1data.h"
38 #include "vdpau_internal.h"
39 #include "libavutil/avassert.h"
40 
41 
42 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
43 
44 typedef struct {
45  /**
46  * Transform coefficients for both sprites in 16.16 fixed point format,
47  * in the order they appear in the bitstream:
48  * x scale
49  * rotation 1 (unused)
50  * x offset
51  * rotation 2 (unused)
52  * y scale
53  * y offset
54  * alpha
55  */
56  int coefs[2][7];
57 
58  int effect_type, effect_flag;
59  int effect_pcount1, effect_pcount2; ///< number of effect parameters stored in effect_params1/effect_params2
60  int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
61 } SpriteData;
62 
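/* Read one sprite parameter as a signed 16.16 fixed-point value: the bitstream
 * stores it as a 30-bit field biased by 1 << 29, so subtracting the bias and
 * shifting left by one restores the sign and the 16.16 scaling. */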
63 static inline int get_fp_val(GetBitContext* gb)
64 {
65  return (get_bits_long(gb, 30) - (1 << 29)) << 1;
66 }
67 
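/* Parse one sprite transform. The 2-bit type selects how much of the matrix is
 * coded: 0 = unit scale (offsets only), 1 = one common scale for x and y,
 * 2 = separate x and y scales, 3 = full matrix including the (unused) rotation
 * terms c[1] and c[3]. The y offset always follows, and the optional alpha
 * coefficient defaults to 1.0 (1 << 16). */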
68 static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
69 {
70  c[1] = c[3] = 0;
71 
72  switch (get_bits(gb, 2)) {
73  case 0:
74  c[0] = 1 << 16;
75  c[2] = get_fp_val(gb);
76  c[4] = 1 << 16;
77  break;
78  case 1:
79  c[0] = c[4] = get_fp_val(gb);
80  c[2] = get_fp_val(gb);
81  break;
82  case 2:
83  c[0] = get_fp_val(gb);
84  c[2] = get_fp_val(gb);
85  c[4] = get_fp_val(gb);
86  break;
87  case 3:
88  c[0] = get_fp_val(gb);
89  c[1] = get_fp_val(gb);
90  c[2] = get_fp_val(gb);
91  c[3] = get_fp_val(gb);
92  c[4] = get_fp_val(gb);
93  break;
94  }
95  c[5] = get_fp_val(gb);
96  if (get_bits1(gb))
97  c[6] = get_fp_val(gb);
98  else
99  c[6] = 1 << 16;
100 }
101 
102 static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
103 {
104  AVCodecContext *avctx = v->s.avctx;
105  int sprite, i;
106 
107  for (sprite = 0; sprite <= v->two_sprites; sprite++) {
108  vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
109  if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
110  avpriv_request_sample(avctx, "Non-zero rotation coefficients");
111  av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
112  for (i = 0; i < 7; i++)
113  av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
114  sd->coefs[sprite][i] / (1<<16),
115  (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
116  av_log(avctx, AV_LOG_DEBUG, "\n");
117  }
118 
119  skip_bits(gb, 2);
120  if (sd->effect_type = get_bits_long(gb, 30)) {
121  switch (sd->effect_pcount1 = get_bits(gb, 4)) {
122  case 7:
123  vc1_sprite_parse_transform(gb, sd->effect_params1);
124  break;
125  case 14:
126  vc1_sprite_parse_transform(gb, sd->effect_params1);
127  vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
128  break;
129  default:
130  for (i = 0; i < sd->effect_pcount1; i++)
131  sd->effect_params1[i] = get_fp_val(gb);
132  }
133  if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
134  // effect 13 is simple alpha blending and matches the opacity above
135  av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
136  for (i = 0; i < sd->effect_pcount1; i++)
137  av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
138  sd->effect_params1[i] / (1 << 16),
139  (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
140  av_log(avctx, AV_LOG_DEBUG, "\n");
141  }
142 
143  sd->effect_pcount2 = get_bits(gb, 16);
144  if (sd->effect_pcount2 > 10) {
145  av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
146  return AVERROR_INVALIDDATA;
147  } else if (sd->effect_pcount2) {
148  i = -1;
149  av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
150  while (++i < sd->effect_pcount2) {
151  sd->effect_params2[i] = get_fp_val(gb);
152  av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
153  sd->effect_params2[i] / (1 << 16),
154  (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
155  }
156  av_log(avctx, AV_LOG_DEBUG, "\n");
157  }
158  }
159  if (sd->effect_flag = get_bits1(gb))
160  av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");
161 
162  if (get_bits_count(gb) >= gb->size_in_bits +
163  (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
164  av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
165  return AVERROR_INVALIDDATA;
166  }
167  if (get_bits_count(gb) < gb->size_in_bits - 8)
168  av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");
169 
170  return 0;
171 }
172 
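/* Composite one or two sprites into sprite_output_frame. For each output row
 * the source row(s) are horizontally resampled with vc1dsp.sprite_h (cached in
 * sr_rows / sr_cache so the same source line is never rescaled twice), then
 * interpolated vertically and, for two sprites, alpha-blended with the
 * sprite_v_* helpers. */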
173 static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
174 {
175  int i, plane, row, sprite;
176  int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
177  uint8_t* src_h[2][2];
178  int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
179  int ysub[2];
180  MpegEncContext *s = &v->s;
181 
182  for (i = 0; i <= v->two_sprites; i++) {
183  xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
184  xadv[i] = sd->coefs[i][0];
185  if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
186  xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);
187 
188  yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
189  yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
190  }
191  alpha = av_clip(sd->coefs[1][6], 0, (1<<16) - 1);
192 
193  for (plane = 0; plane < (s->flags&CODEC_FLAG_GRAY ? 1 : 3); plane++) {
194  int width = v->output_width>>!!plane;
195 
196  for (row = 0; row < v->output_height>>!!plane; row++) {
197  uint8_t *dst = v->sprite_output_frame->data[plane] +
198  v->sprite_output_frame->linesize[plane] * row;
199 
200  for (sprite = 0; sprite <= v->two_sprites; sprite++) {
201  uint8_t *iplane = s->current_picture.f->data[plane];
202  int iline = s->current_picture.f->linesize[plane];
203  int ycoord = yoff[sprite] + yadv[sprite] * row;
204  int yline = ycoord >> 16;
205  int next_line;
206  ysub[sprite] = ycoord & 0xFFFF;
207  if (sprite) {
208  iplane = s->last_picture.f->data[plane];
209  iline = s->last_picture.f->linesize[plane];
210  }
211  next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
212  if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
213  src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
214  if (ysub[sprite])
215  src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
216  } else {
217  if (sr_cache[sprite][0] != yline) {
218  if (sr_cache[sprite][1] == yline) {
219  FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
220  FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
221  } else {
222  v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
223  sr_cache[sprite][0] = yline;
224  }
225  }
226  if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
227  v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
228  iplane + next_line, xoff[sprite],
229  xadv[sprite], width);
230  sr_cache[sprite][1] = yline + 1;
231  }
232  src_h[sprite][0] = v->sr_rows[sprite][0];
233  src_h[sprite][1] = v->sr_rows[sprite][1];
234  }
235  }
236 
237  if (!v->two_sprites) {
238  if (ysub[0]) {
239  v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
240  } else {
241  memcpy(dst, src_h[0][0], width);
242  }
243  } else {
244  if (ysub[0] && ysub[1]) {
245  v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
246  src_h[1][0], src_h[1][1], ysub[1], alpha, width);
247  } else if (ysub[0]) {
248  v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
249  src_h[1][0], alpha, width);
250  } else if (ysub[1]) {
251  v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
252  src_h[0][0], (1<<16)-1-alpha, width);
253  } else {
254  v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);
255  }
256  }
257  }
258 
259  if (!plane) {
260  for (i = 0; i <= v->two_sprites; i++) {
261  xoff[i] >>= 1;
262  yoff[i] >>= 1;
263  }
264  }
265 
266  }
267 }
268 
269 
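/* Parse the sprite parameters for the current picture and render the result
 * into sprite_output_frame. This needs an already decoded current picture
 * (and a previous one when two sprites are blended); otherwise decoding fails
 * or falls back to single-sprite mode. */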
270 static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
271 {
272  int ret;
273  MpegEncContext *s = &v->s;
274  AVCodecContext *avctx = s->avctx;
275  SpriteData sd;
276 
277  memset(&sd, 0, sizeof(sd));
278 
279  ret = vc1_parse_sprites(v, gb, &sd);
280  if (ret < 0)
281  return ret;
282 
283  if (!s->current_picture.f || !s->current_picture.f->data[0]) {
284  av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
285  return -1;
286  }
287 
288  if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
289  av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
290  v->two_sprites = 0;
291  }
292 
293  av_frame_unref(v->sprite_output_frame);
294  if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)
295  return ret;
296 
297  vc1_draw_sprites(v, &sd);
298 
299  return 0;
300 }
301 
302 static void vc1_sprite_flush(AVCodecContext *avctx)
303 {
304  VC1Context *v = avctx->priv_data;
305  MpegEncContext *s = &v->s;
306  AVFrame *f = s->current_picture.f;
307  int plane, i;
308 
309  /* Windows Media Image codecs have a convergence interval of two keyframes.
310  Since we can't enforce it, clear to black the missing sprite. This is
311  wrong but it looks better than doing nothing. */
312 
313  if (f && f->data[0])
314  for (plane = 0; plane < (s->flags&CODEC_FLAG_GRAY ? 1 : 3); plane++)
315  for (i = 0; i < v->sprite_height>>!!plane; i++)
316  memset(f->data[plane] + i * f->linesize[plane],
317  plane ? 128 : 0, f->linesize[plane]);
318 }
319 
320 #endif
321 
322 av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
323 {
324  MpegEncContext *s = &v->s;
325  int i;
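 /* Round the macroblock height up to an even value so the per-MB arrays below
    are also large enough when interlaced field pictures treat the frame as two
    half-height fields (see the second-field offsets in vc1_decode_frame). */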
326  int mb_height = FFALIGN(s->mb_height, 2);
327 
328  /* Allocate mb bitplanes */
329  v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
330  v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
331  v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
332  v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
333  v->acpred_plane = av_malloc (s->mb_stride * mb_height);
334  v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
335 
336  v->n_allocated_blks = s->mb_width + 2;
337  v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
338  v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 2 * s->mb_stride);
339  v->cbp = v->cbp_base + s->mb_stride;
340  v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 2 * s->mb_stride);
341  v->ttblk = v->ttblk_base + s->mb_stride;
342  v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 2 * s->mb_stride);
343  v->is_intra = v->is_intra_base + s->mb_stride;
344  v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 2 * s->mb_stride);
345  v->luma_mv = v->luma_mv_base + s->mb_stride;
346 
347  /* allocate the block type info in such a way that it can be used with s->block_index[] */
348  v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
349  v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
350  v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
351  v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);
352 
353  /* allocate memory to store block level MV info */
354  v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
355  v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
356  v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
357  v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
358  v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
359  v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
360  v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
361  v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
362 
363  /* Init coded blocks info */
364  if (v->profile == PROFILE_ADVANCED) {
365 // if (alloc_bitplane(&v->over_flags_plane, s->mb_width, s->mb_height) < 0)
366 // return -1;
367 // if (alloc_bitplane(&v->ac_pred_plane, s->mb_width, s->mb_height) < 0)
368 // return -1;
369  }
370 
371  ff_intrax8_common_init(&v->x8,s);
372 
373  if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
374  for (i = 0; i < 4; i++)
375  if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))
376  return AVERROR(ENOMEM);
377  }
378 
379  if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->acpred_plane || !v->over_flags_plane ||
380  !v->block || !v->cbp_base || !v->ttblk_base || !v->is_intra_base || !v->luma_mv_base ||
381  !v->mb_type_base) {
382  av_freep(&v->mv_type_mb_plane);
383  av_freep(&v->direct_mb_plane);
384  av_freep(&v->acpred_plane);
385  av_freep(&v->over_flags_plane);
386  av_freep(&v->block);
387  av_freep(&v->cbp_base);
388  av_freep(&v->ttblk_base);
389  av_freep(&v->is_intra_base);
390  av_freep(&v->luma_mv_base);
391  av_freep(&v->mb_type_base);
392  return AVERROR(ENOMEM);
393  }
394 
395  return 0;
396 }
397 
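/* Fill zz_8x8 with transposed copies of the WMV1 zigzag scan tables and set
 * the matching block shifts. The plain (non-transposed) tables with swapped
 * shifts are used instead for simple/main profile streams without res_fasttx;
 * see vc1_decode_init. */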
398 static av_cold void vc1_init_transposed_scantables(VC1Context *v)
399 {
400  int i;
401  for (i = 0; i < 64; i++) {
402 #define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
403  v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
404  v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
405  v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
406  v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
407  v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
408  }
409  v->left_blk_sh = 0;
410  v->top_blk_sh = 3;
411 }
412 
413 /** Initialize a VC1/WMV3 decoder
414  * @todo TODO: Handle VC-1 IDUs (Transport level?)
415  * @todo TODO: Decipher remaining bits in extradata
416  */
417 static av_cold int vc1_decode_init(AVCodecContext *avctx)
418 {
419  VC1Context *v = avctx->priv_data;
420  MpegEncContext *s = &v->s;
421  GetBitContext gb;
422  int ret;
423 
424  /* save the container output size for WMImage */
425  v->output_width = avctx->width;
426  v->output_height = avctx->height;
427 
428  if (!avctx->extradata_size || !avctx->extradata)
429  return -1;
430  if (!(avctx->flags & CODEC_FLAG_GRAY))
431  avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
432  else
433  avctx->pix_fmt = AV_PIX_FMT_GRAY8;
434  v->s.avctx = avctx;
435 
436  if ((ret = ff_vc1_init_common(v)) < 0)
437  return ret;
438  // ensure static VLC tables are initialized
439  if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
440  return ret;
441  if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
442  return ret;
443  // Hack to ensure the above functions will be called
444  // again once we know all necessary settings.
445  // That this is necessary might indicate a bug.
446  ff_vc1_decode_end(avctx);
447 
448  ff_blockdsp_init(&s->bdsp, avctx);
449  ff_h264chroma_init(&v->h264chroma, 8);
450  ff_qpeldsp_init(&s->qdsp);
451 
452  if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
453  int count = 0;
454 
455  // looks like WMV3 has a sequence header stored in the extradata
456  // advanced sequence header may be before the first frame
457  // the last byte of the extradata is a version number, 1 for the
458  // samples we can decode
459 
460  init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);
461 
462  if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)
463  return ret;
464 
465  count = avctx->extradata_size*8 - get_bits_count(&gb);
466  if (count > 0) {
467  av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
468  count, get_bits(&gb, count));
469  } else if (count < 0) {
470  av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
471  }
472  } else { // VC1/WVC1/WVP2
473  const uint8_t *start = avctx->extradata;
474  uint8_t *end = avctx->extradata + avctx->extradata_size;
475  const uint8_t *next;
476  int size, buf2_size;
477  uint8_t *buf2 = NULL;
478  int seq_initialized = 0, ep_initialized = 0;
479 
480  if (avctx->extradata_size < 16) {
481  av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
482  return -1;
483  }
484 
485  buf2 = av_mallocz(avctx->extradata_size + FF_INPUT_BUFFER_PADDING_SIZE);
486  start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
487  next = start;
488  for (; next < end; start = next) {
489  next = find_next_marker(start + 4, end);
490  size = next - start - 4;
491  if (size <= 0)
492  continue;
493  buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
494  init_get_bits(&gb, buf2, buf2_size * 8);
495  switch (AV_RB32(start)) {
496  case VC1_CODE_SEQHDR:
497  if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
498  av_free(buf2);
499  return ret;
500  }
501  seq_initialized = 1;
502  break;
503  case VC1_CODE_ENTRYPOINT:
504  if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {
505  av_free(buf2);
506  return ret;
507  }
508  ep_initialized = 1;
509  break;
510  }
511  }
512  av_free(buf2);
513  if (!seq_initialized || !ep_initialized) {
514  av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
515  return -1;
516  }
517  v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
518  }
519 
520  v->sprite_output_frame = av_frame_alloc();
521  if (!v->sprite_output_frame)
522  return AVERROR(ENOMEM);
523 
524  avctx->profile = v->profile;
525  if (v->profile == PROFILE_ADVANCED)
526  avctx->level = v->level;
527 
528  avctx->has_b_frames = !!avctx->max_b_frames;
529 
530  if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
531  avctx->color_primaries = v->color_prim;
532  if (v->transfer_char == 1 || v->transfer_char == 7)
533  avctx->color_trc = v->transfer_char;
534  if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
535  avctx->colorspace = v->matrix_coef;
536 
537  s->mb_width = (avctx->coded_width + 15) >> 4;
538  s->mb_height = (avctx->coded_height + 15) >> 4;
539 
540  if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
541  vc1_init_transposed_scantables(v);
542  } else {
543  memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);
544  v->left_blk_sh = 3;
545  v->top_blk_sh = 0;
546  }
547 
548  if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
549  v->sprite_width = avctx->coded_width;
550  v->sprite_height = avctx->coded_height;
551 
552  avctx->coded_width = avctx->width = v->output_width;
553  avctx->coded_height = avctx->height = v->output_height;
554 
555  // prevent 16.16 overflows
556  if (v->sprite_width > 1 << 14 ||
557  v->sprite_height > 1 << 14 ||
558  v->output_width > 1 << 14 ||
559  v->output_height > 1 << 14) return -1;
560 
561  if ((v->sprite_width&1) || (v->sprite_height&1)) {
562  avpriv_request_sample(avctx, "odd sprites support");
563  return AVERROR_PATCHWELCOME;
564  }
565  }
566  return 0;
567 }
568 
569 /** Close a VC1/WMV3 decoder
570  * @warning Initial try at using MpegEncContext stuff
571  */
572 av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
573 {
574  VC1Context *v = avctx->priv_data;
575  int i;
576 
577  av_frame_free(&v->sprite_output_frame);
578 
579  for (i = 0; i < 4; i++)
580  av_freep(&v->sr_rows[i >> 1][i & 1]);
581  av_freep(&v->hrd_rate);
582  av_freep(&v->hrd_buffer);
583  ff_mpv_common_end(&v->s);
584  av_freep(&v->mv_type_mb_plane);
585  av_freep(&v->direct_mb_plane);
586  av_freep(&v->forward_mb_plane);
587  av_freep(&v->fieldtx_plane);
588  av_freep(&v->acpred_plane);
589  av_freep(&v->over_flags_plane);
590  av_freep(&v->mb_type_base);
591  av_freep(&v->blk_mv_type_base);
592  av_freep(&v->mv_f_base);
593  av_freep(&v->mv_f_next_base);
594  av_freep(&v->block);
595  av_freep(&v->cbp_base);
596  av_freep(&v->ttblk_base);
597  av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
598  av_freep(&v->luma_mv_base);
599  ff_intrax8_common_end(&v->x8);
600  return 0;
601 }
602 
603 
604 /** Decode a VC1/WMV3 frame
605  * @todo TODO: Handle VC-1 IDUs (Transport level?)
606  */
607 static int vc1_decode_frame(AVCodecContext *avctx, void *data,
608  int *got_frame, AVPacket *avpkt)
609 {
610  const uint8_t *buf = avpkt->data;
611  int buf_size = avpkt->size, n_slices = 0, i, ret;
612  VC1Context *v = avctx->priv_data;
613  MpegEncContext *s = &v->s;
614  AVFrame *pict = data;
615  uint8_t *buf2 = NULL;
616  const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
617  int mb_height, n_slices1=-1;
618  struct {
619  uint8_t *buf;
620  GetBitContext gb;
621  int mby_start;
622  } *slices = NULL, *tmp;
623 
624  v->second_field = 0;
625 
626  if(s->flags & CODEC_FLAG_LOW_DELAY)
627  s->low_delay = 1;
628 
629  /* no supplementary picture */
630  if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
631  /* special case for last picture */
632  if (s->low_delay == 0 && s->next_picture_ptr) {
633  if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
634  return ret;
635  s->next_picture_ptr = NULL;
636 
637  *got_frame = 1;
638  }
639 
640  return buf_size;
641  }
642 
643  if (s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) {
644  if (v->profile < PROFILE_ADVANCED)
645  avctx->pix_fmt = AV_PIX_FMT_VDPAU_WMV3;
646  else
647  avctx->pix_fmt = AV_PIX_FMT_VDPAU_VC1;
648  }
649 
650  //for advanced profile we may need to parse and unescape data
651  if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
652  int buf_size2 = 0;
653  buf2 = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
654  if (!buf2)
655  return AVERROR(ENOMEM);
656 
657  if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
658  const uint8_t *start, *end, *next;
659  int size;
660 
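/* Split the packet into its start-code delimited IDUs: frame data is
 * unescaped into buf2, entry point headers are parsed immediately, and each
 * field or slice gets its own unescaped buffer and GetBitContext in slices[]
 * so it can be decoded at the correct macroblock row later on. */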
661  next = buf;
662  for (start = buf, end = buf + buf_size; next < end; start = next) {
663  next = find_next_marker(start + 4, end);
664  size = next - start - 4;
665  if (size <= 0) continue;
666  switch (AV_RB32(start)) {
667  case VC1_CODE_FRAME:
668  if (avctx->hwaccel ||
669  s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
670  buf_start = start;
671  buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
672  break;
673  case VC1_CODE_FIELD: {
674  int buf_size3;
675  if (avctx->hwaccel ||
676  s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
677  buf_start_second_field = start;
678  tmp = av_realloc_array(slices, sizeof(*slices), (n_slices+1));
679  if (!tmp)
680  goto err;
681  slices = tmp;
682  slices[n_slices].buf = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
683  if (!slices[n_slices].buf)
684  goto err;
685  buf_size3 = vc1_unescape_buffer(start + 4, size,
686  slices[n_slices].buf);
687  init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
688  buf_size3 << 3);
689  /* assuming that the field marker is at the exact middle,
690  hope it's correct */
691  slices[n_slices].mby_start = s->mb_height + 1 >> 1;
692  n_slices1 = n_slices - 1; // index of the last slice of the first field
693  n_slices++;
694  break;
695  }
696  case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
697  buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
698  init_get_bits(&s->gb, buf2, buf_size2 * 8);
699  ff_vc1_decode_entry_point(avctx, v, &s->gb);
700  break;
701  case VC1_CODE_SLICE: {
702  int buf_size3;
703  tmp = av_realloc_array(slices, sizeof(*slices), (n_slices+1));
704  if (!tmp)
705  goto err;
706  slices = tmp;
707  slices[n_slices].buf = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
708  if (!slices[n_slices].buf)
709  goto err;
710  buf_size3 = vc1_unescape_buffer(start + 4, size,
711  slices[n_slices].buf);
712  init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
713  buf_size3 << 3);
714  slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
715  n_slices++;
716  break;
717  }
718  }
719  }
720  } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
721  const uint8_t *divider;
722  int buf_size3;
723 
724  divider = find_next_marker(buf, buf + buf_size);
725  if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
726  av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
727  goto err;
728  } else { // found field marker, unescape second field
729  if (avctx->hwaccel ||
730  s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
731  buf_start_second_field = divider;
732  tmp = av_realloc_array(slices, sizeof(*slices), (n_slices+1));
733  if (!tmp)
734  goto err;
735  slices = tmp;
736  slices[n_slices].buf = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
737  if (!slices[n_slices].buf)
738  goto err;
739  buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
740  init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
741  buf_size3 << 3);
742  slices[n_slices].mby_start = s->mb_height + 1 >> 1;
743  n_slices1 = n_slices - 1;
744  n_slices++;
745  }
746  buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
747  } else {
748  buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
749  }
750  init_get_bits(&s->gb, buf2, buf_size2*8);
751  } else
752  init_get_bits(&s->gb, buf, buf_size*8);
753 
754  if (v->res_sprite) {
755  v->new_sprite = !get_bits1(&s->gb);
756  v->two_sprites = get_bits1(&s->gb);
757  /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
758  we're using the sprite compositor. These are intentionally kept separate
759  so you can get the raw sprites by using the wmv3 decoder for WMVP or
760  the vc1 one for WVP2 */
761  if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
762  if (v->new_sprite) {
763  // switch AVCodecContext parameters to those of the sprites
764  avctx->width = avctx->coded_width = v->sprite_width;
765  avctx->height = avctx->coded_height = v->sprite_height;
766  } else {
767  goto image;
768  }
769  }
770  }
771 
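/* A coded size change (e.g. switching to the sprite dimensions above)
 * invalidates the MPV context, so tear it down here and let the block below
 * reallocate it. */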
772  if (s->context_initialized &&
773  (s->width != avctx->coded_width ||
774  s->height != avctx->coded_height)) {
775  ff_vc1_decode_end(avctx);
776  }
777 
778  if (!s->context_initialized) {
779  if (ff_msmpeg4_decode_init(avctx) < 0)
780  goto err;
781  if (ff_vc1_decode_init_alloc_tables(v) < 0) {
782  ff_mpv_common_end(s);
783  goto err;
784  }
785 
786  s->low_delay = !avctx->has_b_frames || v->res_sprite;
787 
788  if (v->profile == PROFILE_ADVANCED) {
789  if(avctx->coded_width<=1 || avctx->coded_height<=1)
790  goto err;
791  s->h_edge_pos = avctx->coded_width;
792  s->v_edge_pos = avctx->coded_height;
793  }
794  }
795 
796  // do parse frame header
797  v->pic_header_flag = 0;
798  v->first_pic_header_flag = 1;
799  if (v->profile < PROFILE_ADVANCED) {
800  if (ff_vc1_parse_frame_header(v, &s->gb) < 0) {
801  goto err;
802  }
803  } else {
804  if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
805  goto err;
806  }
807  }
808  v->first_pic_header_flag = 0;
809 
810  if (avctx->debug & FF_DEBUG_PICT_INFO)
811  av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));
812 
813  if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
814  && s->pict_type != AV_PICTURE_TYPE_I) {
815  av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
816  goto err;
817  }
818 
819  if ((s->mb_height >> v->field_mode) == 0) {
820  av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
821  goto err;
822  }
823 
824  // for skipping the frame
825  s->current_picture.f->pict_type = s->pict_type;
826  s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
827 
828  /* skip B-frames if we don't have reference frames */
829  if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
830  av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
831  goto end;
832  }
833  if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
834  (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
835  avctx->skip_frame >= AVDISCARD_ALL) {
836  goto end;
837  }
838 
839  if (s->next_p_frame_damaged) {
840  if (s->pict_type == AV_PICTURE_TYPE_B)
841  goto end;
842  else
843  s->next_p_frame_damaged = 0;
844  }
845 
846  if (ff_mpv_frame_start(s, avctx) < 0) {
847  goto err;
848  }
849 
850  v->s.current_picture_ptr->field_picture = v->field_mode;
851  v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
852  v->s.current_picture_ptr->f->top_field_first = v->tff;
853 
854  // process pulldown flags
855  s->current_picture_ptr->f->repeat_pict = 0;
856  // Pulldown flags are only valid when 'broadcast' has been set.
857  // So ticks_per_frame will be 2
858  if (v->rff) {
859  // repeat field
860  s->current_picture_ptr->f->repeat_pict = 1;
861  } else if (v->rptfrm) {
862  // repeat frames
863  s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
864  }
865 
866  s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
867  s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;
868 
869  if ((CONFIG_VC1_VDPAU_DECODER)
870  && s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) {
871  if (v->field_mode && buf_start_second_field) {
872  ff_vdpau_vc1_decode_picture(s, buf_start, buf_start_second_field - buf_start);
873  ff_vdpau_vc1_decode_picture(s, buf_start_second_field, (buf + buf_size) - buf_start_second_field);
874  } else {
875  ff_vdpau_vc1_decode_picture(s, buf_start, (buf + buf_size) - buf_start);
876  }
877  } else if (avctx->hwaccel) {
878  if (v->field_mode && buf_start_second_field) {
879  // decode first field
880  s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
881  if (avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start) < 0)
882  goto err;
883  if (avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start) < 0)
884  goto err;
885  if (avctx->hwaccel->end_frame(avctx) < 0)
886  goto err;
887 
888  // decode second field
889  s->gb = slices[n_slices1 + 1].gb;
890  s->picture_structure = PICT_TOP_FIELD + v->tff;
891  v->second_field = 1;
892  v->pic_header_flag = 0;
893  if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
894  av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed");
895  goto err;
896  }
897  v->s.current_picture_ptr->f->pict_type = v->s.pict_type;
898 
899  if (avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field) < 0)
900  goto err;
901  if (avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field) < 0)
902  goto err;
903  if (avctx->hwaccel->end_frame(avctx) < 0)
904  goto err;
905  } else {
906  s->picture_structure = PICT_FRAME;
907  if (avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start) < 0)
908  goto err;
909  if (avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start) < 0)
910  goto err;
911  if (avctx->hwaccel->end_frame(avctx) < 0)
912  goto err;
913  }
914  } else {
915  int header_ret = 0;
916 
917  ff_mpeg_er_frame_start(s);
918 
919  v->bits = buf_size * 8;
920  v->end_mb_x = s->mb_width;
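/* In field mode each field is decoded as a half-height picture interleaved
 * into the frame buffer, so the line sizes are doubled here and restored once
 * both fields have been decoded (after the slice loop below). */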
921  if (v->field_mode) {
922  s->current_picture.f->linesize[0] <<= 1;
923  s->current_picture.f->linesize[1] <<= 1;
924  s->current_picture.f->linesize[2] <<= 1;
925  s->linesize <<= 1;
926  s->uvlinesize <<= 1;
927  }
928  mb_height = s->mb_height >> v->field_mode;
929 
930  av_assert0 (mb_height > 0);
931 
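/* Decode the picture slice by slice: iteration i covers the rows from the
 * previous slice start up to slices[i].mby_start (or the bottom of the
 * picture/field), re-parsing a picture header where the bitstream signals one
 * and switching to the second field after the last slice of the first field. */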
932  for (i = 0; i <= n_slices; i++) {
933  if (i > 0 && slices[i - 1].mby_start >= mb_height) {
934  if (v->field_mode <= 0) {
935  av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
936  "picture boundary (%d >= %d)\n", i,
937  slices[i - 1].mby_start, mb_height);
938  continue;
939  }
940  v->second_field = 1;
941  av_assert0((s->mb_height & 1) == 0);
942  v->blocks_off = s->b8_stride * (s->mb_height&~1);
943  v->mb_off = s->mb_stride * s->mb_height >> 1;
944  } else {
945  v->second_field = 0;
946  v->blocks_off = 0;
947  v->mb_off = 0;
948  }
949  if (i) {
950  v->pic_header_flag = 0;
951  if (v->field_mode && i == n_slices1 + 2) {
952  if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
953  av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
954  if (avctx->err_recognition & AV_EF_EXPLODE)
955  goto err;
956  continue;
957  }
958  } else if (get_bits1(&s->gb)) {
959  v->pic_header_flag = 1;
960  if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
961  av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
962  if (avctx->err_recognition & AV_EF_EXPLODE)
963  goto err;
964  continue;
965  }
966  }
967  }
968  if (header_ret < 0)
969  continue;
970  s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
971  if (!v->field_mode || v->second_field)
972  s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
973  else {
974  if (i >= n_slices) {
975  av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
976  continue;
977  }
978  s->end_mb_y = (i <= n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
979  }
980  if (s->end_mb_y <= s->start_mb_y) {
981  av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
982  continue;
983  }
984  if (!v->p_frame_skipped && s->pict_type != AV_PICTURE_TYPE_I && !v->cbpcy_vlc) {
985  av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
986  continue;
987  }
988  ff_vc1_decode_blocks(v);
989  if (i != n_slices)
990  s->gb = slices[i].gb;
991  }
992  if (v->field_mode) {
993  v->second_field = 0;
994  s->current_picture.f->linesize[0] >>= 1;
995  s->current_picture.f->linesize[1] >>= 1;
996  s->current_picture.f->linesize[2] >>= 1;
997  s->linesize >>= 1;
998  s->uvlinesize >>= 1;
999  if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
1000  FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
1001  FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
1002  }
1003  }
1004  av_dlog(s->avctx, "Consumed %i/%i bits\n",
1005  get_bits_count(&s->gb), s->gb.size_in_bits);
1006 // if (get_bits_count(&s->gb) > buf_size * 8)
1007 // return -1;
1008  if (s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B)
1009  goto err;
1010  if (!v->field_mode)
1011  ff_er_frame_end(&s->er);
1012  }
1013 
1014  ff_mpv_frame_end(s);
1015 
1016  if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
1017 image:
1018  avctx->width = avctx->coded_width = v->output_width;
1019  avctx->height = avctx->coded_height = v->output_height;
1020  if (avctx->skip_frame >= AVDISCARD_NONREF)
1021  goto end;
1022 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
1023  if (vc1_decode_sprites(v, &s->gb))
1024  goto err;
1025 #endif
1026  if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
1027  goto err;
1028  *got_frame = 1;
1029  } else {
1030  if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
1031  if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
1032  goto err;
1033  ff_print_debug_info(s, s->current_picture_ptr, pict);
1034  *got_frame = 1;
1035  } else if (s->last_picture_ptr) {
1036  if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
1037  goto err;
1038  ff_print_debug_info(s, s->last_picture_ptr, pict);
1039  *got_frame = 1;
1040  }
1041  }
1042 
1043 end:
1044  av_free(buf2);
1045  for (i = 0; i < n_slices; i++)
1046  av_free(slices[i].buf);
1047  av_free(slices);
1048  return buf_size;
1049 
1050 err:
1051  av_free(buf2);
1052  for (i = 0; i < n_slices; i++)
1053  av_free(slices[i].buf);
1054  av_free(slices);
1055  return -1;
1056 }
1057 
1058 
1059 static const AVProfile profiles[] = {
1060  { FF_PROFILE_VC1_SIMPLE, "Simple" },
1061  { FF_PROFILE_VC1_MAIN, "Main" },
1062  { FF_PROFILE_VC1_COMPLEX, "Complex" },
1063  { FF_PROFILE_VC1_ADVANCED, "Advanced" },
1064  { FF_PROFILE_UNKNOWN },
1065 };
1066 
1067 static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
1068 #if CONFIG_VC1_DXVA2_HWACCEL
1069  AV_PIX_FMT_DXVA2_VLD,
1070 #endif
1071 #if CONFIG_VC1_VAAPI_HWACCEL
1072  AV_PIX_FMT_VAAPI_VLD,
1073 #endif
1074 #if CONFIG_VC1_VDPAU_HWACCEL
1075  AV_PIX_FMT_VDPAU,
1076 #endif
1077  AV_PIX_FMT_YUV420P,
1078  AV_PIX_FMT_NONE
1079 };
1080 
1081 AVCodec ff_vc1_decoder = {
1082  .name = "vc1",
1083  .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
1084  .type = AVMEDIA_TYPE_VIDEO,
1085  .id = AV_CODEC_ID_VC1,
1086  .priv_data_size = sizeof(VC1Context),
1087  .init = vc1_decode_init,
1088  .close = ff_vc1_decode_end,
1089  .decode = vc1_decode_frame,
1090  .flush = ff_mpeg_flush,
1091  .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
1092  .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1093  .profiles = NULL_IF_CONFIG_SMALL(profiles)
1094 };
1095 
1096 #if CONFIG_WMV3_DECODER
1097 AVCodec ff_wmv3_decoder = {
1098  .name = "wmv3",
1099  .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
1100  .type = AVMEDIA_TYPE_VIDEO,
1101  .id = AV_CODEC_ID_WMV3,
1102  .priv_data_size = sizeof(VC1Context),
1103  .init = vc1_decode_init,
1104  .close = ff_vc1_decode_end,
1105  .decode = vc1_decode_frame,
1106  .flush = ff_mpeg_flush,
1107  .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
1108  .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1109  .profiles = NULL_IF_CONFIG_SMALL(profiles)
1110 };
1111 #endif
1112 
1113 #if CONFIG_WMV3_VDPAU_DECODER
1114 AVCodec ff_wmv3_vdpau_decoder = {
1115  .name = "wmv3_vdpau",
1116  .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 VDPAU"),
1117  .type = AVMEDIA_TYPE_VIDEO,
1118  .id = AV_CODEC_ID_WMV3,
1119  .priv_data_size = sizeof(VC1Context),
1120  .init = vc1_decode_init,
1121  .close = ff_vc1_decode_end,
1122  .decode = vc1_decode_frame,
1123  .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
1124  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU_WMV3, AV_PIX_FMT_NONE },
1125  .profiles = NULL_IF_CONFIG_SMALL(profiles)
1126 };
1127 #endif
1128 
1129 #if CONFIG_VC1_VDPAU_DECODER
1130 AVCodec ff_vc1_vdpau_decoder = {
1131  .name = "vc1_vdpau",
1132  .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1 VDPAU"),
1133  .type = AVMEDIA_TYPE_VIDEO,
1134  .id = AV_CODEC_ID_VC1,
1135  .priv_data_size = sizeof(VC1Context),
1136  .init = vc1_decode_init,
1137  .close = ff_vc1_decode_end,
1138  .decode = vc1_decode_frame,
1139  .capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
1140  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU_VC1, AV_PIX_FMT_NONE },
1141  .profiles = NULL_IF_CONFIG_SMALL(profiles)
1142 };
1143 #endif
1144 
1145 #if CONFIG_WMV3IMAGE_DECODER
1146 AVCodec ff_wmv3image_decoder = {
1147  .name = "wmv3image",
1148  .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
1149  .type = AVMEDIA_TYPE_VIDEO,
1150  .id = AV_CODEC_ID_WMV3IMAGE,
1151  .priv_data_size = sizeof(VC1Context),
1152  .init = vc1_decode_init,
1153  .close = ff_vc1_decode_end,
1154  .decode = vc1_decode_frame,
1155  .capabilities = CODEC_CAP_DR1,
1156  .flush = vc1_sprite_flush,
1157  .pix_fmts = (const enum AVPixelFormat[]) {
1158  AV_PIX_FMT_YUV420P,
1159  AV_PIX_FMT_NONE
1160  },
1161 };
1162 #endif
1163 
1164 #if CONFIG_VC1IMAGE_DECODER
1165 AVCodec ff_vc1image_decoder = {
1166  .name = "vc1image",
1167  .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
1168  .type = AVMEDIA_TYPE_VIDEO,
1169  .id = AV_CODEC_ID_VC1IMAGE,
1170  .priv_data_size = sizeof(VC1Context),
1171  .init = vc1_decode_init,
1172  .close = ff_vc1_decode_end,
1173  .decode = vc1_decode_frame,
1174  .capabilities = CODEC_CAP_DR1,
1175  .flush = vc1_sprite_flush,
1176  .pix_fmts = (const enum AVPixelFormat[]) {
1177  AV_PIX_FMT_YUV420P,
1178  AV_PIX_FMT_NONE
1179  },
1180 };
1181 #endif