ulti.c
/*
 * IBM Ultimotion Video Decoder
 * Copyright (C) 2004 Konstantin Shishkov
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * IBM Ultimotion Video Decoder.
 */

#include "avcodec.h"
#include "bytestream.h"
#include "codec_internal.h"
#include "decode.h"

#include "ulti_cb.h"

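/* ulti_cb.h provides ulti_codebook[16384]: 4096 four-sample luma vectors,
 * addressed by the 12-bit codebook indices read for code-2 subblocks in
 * ulti_decode_frame() below. */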
typedef struct UltimotionDecodeContext {
    AVCodecContext *avctx;
    int width, height, blocks;
    AVFrame *frame;
    const uint8_t *ulti_codebook;
    GetByteContext gb;
} UltimotionDecodeContext;

static av_cold int ulti_decode_init(AVCodecContext *avctx)
{
    UltimotionDecodeContext *s = avctx->priv_data;

    s->avctx = avctx;
    s->width = avctx->width;
    s->height = avctx->height;
    s->blocks = (s->width / 8) * (s->height / 8);
    if (s->blocks == 0)
        return AVERROR_INVALIDDATA;
    avctx->pix_fmt = AV_PIX_FMT_YUV410P;
    s->ulti_codebook = ulti_codebook;

    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);

    return 0;
}

static av_cold int ulti_decode_end(AVCodecContext *avctx)
{
    UltimotionDecodeContext *s = avctx->priv_data;

    av_frame_free(&s->frame);

    return 0;
}

static const int block_coords[8] = // 4x4 block coords in 8x8 superblock
    { 0, 0, 0, 4, 4, 4, 4, 0};

static const int angle_by_index[4] = { 0, 2, 6, 12};

/* Lookup tables for luma and chroma - used by ulti_convert_yuv() */
static const uint8_t ulti_lumas[64] =
    { 0x10, 0x13, 0x17, 0x1A, 0x1E, 0x21, 0x25, 0x28,
      0x2C, 0x2F, 0x33, 0x36, 0x3A, 0x3D, 0x41, 0x44,
      0x48, 0x4B, 0x4F, 0x52, 0x56, 0x59, 0x5C, 0x60,
      0x63, 0x67, 0x6A, 0x6E, 0x71, 0x75, 0x78, 0x7C,
      0x7F, 0x83, 0x86, 0x8A, 0x8D, 0x91, 0x94, 0x98,
      0x9B, 0x9F, 0xA2, 0xA5, 0xA9, 0xAC, 0xB0, 0xB3,
      0xB7, 0xBA, 0xBE, 0xC1, 0xC5, 0xC8, 0xCC, 0xCF,
      0xD3, 0xD6, 0xDA, 0xDD, 0xE1, 0xE4, 0xE8, 0xEB};

static const uint8_t ulti_chromas[16] =
    { 0x60, 0x67, 0x6D, 0x73, 0x7A, 0x80, 0x86, 0x8D,
      0x93, 0x99, 0xA0, 0xA6, 0xAC, 0xB3, 0xB9, 0xC0};

/* convert Ultimotion YUV block (sixteen 6-bit Y samples and
   two 4-bit chroma samples) into standard YUV and put it into frame */
static void ulti_convert_yuv(AVFrame *frame, int x, int y,
                             uint8_t *luma, int chroma)
{
    uint8_t *y_plane, *cr_plane, *cb_plane;
    int i;

    y_plane = frame->data[0] + x + y * frame->linesize[0];
    cr_plane = frame->data[1] + (x / 4) + (y / 4) * frame->linesize[1];
    cb_plane = frame->data[2] + (x / 4) + (y / 4) * frame->linesize[2];

    cr_plane[0] = ulti_chromas[chroma >> 4];

    cb_plane[0] = ulti_chromas[chroma & 0xF];

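    /* one chroma pair covers the whole 4x4 block (YUV410P stores one Cb/Cr
       sample per 4x4 luma samples); the loop below writes four luma samples
       per row and steps to the next row after every fourth sample */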
    for(i = 0; i < 16; i++){
        y_plane[i & 3] = ulti_lumas[luma[i]];
        if((i & 3) == 3) { //next row
            y_plane += frame->linesize[0];
        }
    }
}

/* generate block like in MS Video1 */
static void ulti_pattern(AVFrame *frame, int x, int y,
                         int f0, int f1, int Y0, int Y1, int chroma)
{
    uint8_t Luma[16];
    int mask, i;
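    /* f0 gives the selector bits (MSB first) for the top two rows, f1 for the
       bottom two: a set bit picks Y1, a clear bit Y0 */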
    for(mask = 0x80, i = 0; mask; mask >>= 1, i++) {
        if(f0 & mask)
            Luma[i] = Y1;
        else
            Luma[i] = Y0;
    }

    for(mask = 0x80, i = 8; mask; mask >>= 1, i++) {
        if(f1 & mask)
            Luma[i] = Y1;
        else
            Luma[i] = Y0;
    }

    ulti_convert_yuv(frame, x, y, Luma, chroma);
}

/* fill block with some gradient */
static void ulti_grad(AVFrame *frame, int x, int y, uint8_t *Y, int chroma, int angle)
{
    uint8_t Luma[16];
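    /* bit 3 of angle mirrors the four ramp values Y[0]..Y[3]; the low bits pick
       one of eight directions for spreading them over the 4x4 block, and any
       other value (16 is used for the raw four-sample case) falls through to
       the 2x2-quadrant layout of the default case */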
    if(angle & 8) { //reverse order
        int t;
        angle &= 0x7;
        t = Y[0];
        Y[0] = Y[3];
        Y[3] = t;
        t = Y[1];
        Y[1] = Y[2];
        Y[2] = t;
    }
    switch(angle){
    case 0:
        Luma[0] = Y[0]; Luma[1] = Y[1]; Luma[2] = Y[2]; Luma[3] = Y[3];
        Luma[4] = Y[0]; Luma[5] = Y[1]; Luma[6] = Y[2]; Luma[7] = Y[3];
        Luma[8] = Y[0]; Luma[9] = Y[1]; Luma[10] = Y[2]; Luma[11] = Y[3];
        Luma[12] = Y[0]; Luma[13] = Y[1]; Luma[14] = Y[2]; Luma[15] = Y[3];
        break;
    case 1:
        Luma[0] = Y[1]; Luma[1] = Y[2]; Luma[2] = Y[3]; Luma[3] = Y[3];
        Luma[4] = Y[0]; Luma[5] = Y[1]; Luma[6] = Y[2]; Luma[7] = Y[3];
        Luma[8] = Y[0]; Luma[9] = Y[1]; Luma[10] = Y[2]; Luma[11] = Y[3];
        Luma[12] = Y[0]; Luma[13] = Y[0]; Luma[14] = Y[1]; Luma[15] = Y[2];
        break;
    case 2:
        Luma[0] = Y[1]; Luma[1] = Y[2]; Luma[2] = Y[3]; Luma[3] = Y[3];
        Luma[4] = Y[1]; Luma[5] = Y[2]; Luma[6] = Y[2]; Luma[7] = Y[3];
        Luma[8] = Y[0]; Luma[9] = Y[1]; Luma[10] = Y[1]; Luma[11] = Y[2];
        Luma[12] = Y[0]; Luma[13] = Y[0]; Luma[14] = Y[1]; Luma[15] = Y[2];
        break;
    case 3:
        Luma[0] = Y[2]; Luma[1] = Y[3]; Luma[2] = Y[3]; Luma[3] = Y[3];
        Luma[4] = Y[1]; Luma[5] = Y[2]; Luma[6] = Y[2]; Luma[7] = Y[3];
        Luma[8] = Y[0]; Luma[9] = Y[1]; Luma[10] = Y[1]; Luma[11] = Y[2];
        Luma[12] = Y[0]; Luma[13] = Y[0]; Luma[14] = Y[0]; Luma[15] = Y[1];
        break;
    case 4:
        Luma[0] = Y[3]; Luma[1] = Y[3]; Luma[2] = Y[3]; Luma[3] = Y[3];
        Luma[4] = Y[2]; Luma[5] = Y[2]; Luma[6] = Y[2]; Luma[7] = Y[2];
        Luma[8] = Y[1]; Luma[9] = Y[1]; Luma[10] = Y[1]; Luma[11] = Y[1];
        Luma[12] = Y[0]; Luma[13] = Y[0]; Luma[14] = Y[0]; Luma[15] = Y[0];
        break;
    case 5:
        Luma[0] = Y[3]; Luma[1] = Y[3]; Luma[2] = Y[3]; Luma[3] = Y[2];
        Luma[4] = Y[3]; Luma[5] = Y[2]; Luma[6] = Y[2]; Luma[7] = Y[1];
        Luma[8] = Y[2]; Luma[9] = Y[1]; Luma[10] = Y[1]; Luma[11] = Y[0];
        Luma[12] = Y[1]; Luma[13] = Y[0]; Luma[14] = Y[0]; Luma[15] = Y[0];
        break;
    case 6:
        Luma[0] = Y[3]; Luma[1] = Y[3]; Luma[2] = Y[2]; Luma[3] = Y[2];
        Luma[4] = Y[3]; Luma[5] = Y[2]; Luma[6] = Y[1]; Luma[7] = Y[1];
        Luma[8] = Y[2]; Luma[9] = Y[2]; Luma[10] = Y[1]; Luma[11] = Y[0];
        Luma[12] = Y[1]; Luma[13] = Y[1]; Luma[14] = Y[0]; Luma[15] = Y[0];
        break;
    case 7:
        Luma[0] = Y[3]; Luma[1] = Y[3]; Luma[2] = Y[2]; Luma[3] = Y[1];
        Luma[4] = Y[3]; Luma[5] = Y[2]; Luma[6] = Y[1]; Luma[7] = Y[0];
        Luma[8] = Y[3]; Luma[9] = Y[2]; Luma[10] = Y[1]; Luma[11] = Y[0];
        Luma[12] = Y[2]; Luma[13] = Y[1]; Luma[14] = Y[0]; Luma[15] = Y[0];
        break;
    default:
        Luma[0] = Y[0]; Luma[1] = Y[0]; Luma[2] = Y[1]; Luma[3] = Y[1];
        Luma[4] = Y[0]; Luma[5] = Y[0]; Luma[6] = Y[1]; Luma[7] = Y[1];
        Luma[8] = Y[2]; Luma[9] = Y[2]; Luma[10] = Y[3]; Luma[11] = Y[3];
        Luma[12] = Y[2]; Luma[13] = Y[2]; Luma[14] = Y[3]; Luma[15] = Y[3];
        break;
    }

    ulti_convert_yuv(frame, x, y, Luma, chroma);
}

static int ulti_decode_frame(AVCodecContext *avctx, AVFrame *rframe,
                             int *got_frame, AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    UltimotionDecodeContext *s = avctx->priv_data;
    int modifier = 0;
    int uniq = 0;
    int mode = 0;
    int blocks = 0;
    int done = 0;
    int x = 0, y = 0;
    int i, ret;
    int skip;
    int tmp;

    if ((ret = ff_reget_buffer(avctx, s->frame, 0)) < 0)
        return ret;

    bytestream2_init(&s->gb, buf, buf_size);

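    /* the packet is a stream of single-byte block codes: 0x70-0x77 are escapes
       (0x70 change modifier, 0x71 set uniq, 0x72 toggle mode, 0x73 end of
       frame, 0x74 skip blocks; others are reported as unknown), while any other
       byte holds four 2-bit codes, one per 4x4 subblock of the current 8x8
       superblock */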
    while(!done) {
        int idx;
        if(blocks >= s->blocks || y >= s->height)
            break;//all blocks decoded

        if (bytestream2_get_bytes_left(&s->gb) < 1)
            goto err;
        idx = bytestream2_get_byteu(&s->gb);
        if((idx & 0xF8) == 0x70) {
            switch(idx) {
            case 0x70: //change modifier
                modifier = bytestream2_get_byte(&s->gb);
                if(modifier>1)
                    av_log(avctx, AV_LOG_INFO, "warning: modifier must be 0 or 1, got %i\n", modifier);
                break;
            case 0x71: // set uniq flag
                uniq = 1;
                break;
            case 0x72: //toggle mode
                mode = !mode;
                break;
            case 0x73: //end-of-frame
                done = 1;
                break;
            case 0x74: //skip some blocks
                skip = bytestream2_get_byte(&s->gb);
                if ((blocks + skip) >= s->blocks)
                    break;
                blocks += skip;
                x += skip * 8;
                while(x >= s->width) {
                    x -= s->width;
                    y += 8;
                }
                break;
            default:
                av_log(avctx, AV_LOG_INFO, "warning: unknown escape 0x%02X\n", idx);
            }
        } else { //handle one block
            int code;
            int cf;
            int angle = 0;
            uint8_t Y[4]; // luma samples of block
            int tx = 0, ty = 0; //coords of subblock
            int chroma = 0;
            if (mode || uniq) {
                uniq = 0;
                cf = 1;
                chroma = 0;
            } else {
                cf = 0;
                if (idx) {
                    chroma = bytestream2_get_byte(&s->gb);
                }
            }
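            /* with mode or uniq set, each coded subblock reads its own chroma
               byte (cf = 1); otherwise a single chroma byte, read above when
               idx is nonzero, applies to all four subblocks */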
            for (i = 0; i < 4; i++) { // for every subblock
                code = (idx >> (6 - i*2)) & 3; //extract 2 bits
                if(!code) //skip subblock
                    continue;
                if(cf) {
                    chroma = bytestream2_get_byte(&s->gb);
                }
                tx = x + block_coords[i * 2];
                ty = y + block_coords[(i * 2) + 1];
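                /* code 1: one byte giving a 6-bit luma level and a ramp index,
                   i.e. a flat or near-flat block;
                   code 2: four luma samples, from the codebook (2 bytes) or
                   packed raw (3 bytes) when modifier is set;
                   code 3: all sixteen samples (modifier set) or an explicit
                   gradient/pattern subcode */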
                switch(code) {
                case 1:
                    tmp = bytestream2_get_byte(&s->gb);

                    angle = angle_by_index[(tmp >> 6) & 0x3];

                    Y[0] = tmp & 0x3F;
                    Y[1] = Y[0];

                    if (angle) {
                        Y[2] = Y[0]+1;
                        if (Y[2] > 0x3F)
                            Y[2] = 0x3F;
                        Y[3] = Y[2];
                    } else {
                        Y[2] = Y[0];
                        Y[3] = Y[0];
                    }
                    break;

                case 2:
                    if (modifier) { // unpack four luma samples
                        tmp = bytestream2_get_be24(&s->gb);

                        Y[0] = (tmp >> 18) & 0x3F;
                        Y[1] = (tmp >> 12) & 0x3F;
                        Y[2] = (tmp >> 6) & 0x3F;
                        Y[3] = tmp & 0x3F;
                        angle = 16;
                    } else { // retrieve luma samples from codebook
                        tmp = bytestream2_get_be16(&s->gb);

                        angle = (tmp >> 12) & 0xF;
                        tmp &= 0xFFF;
                        tmp <<= 2;
                        Y[0] = s->ulti_codebook[tmp];
                        Y[1] = s->ulti_codebook[tmp + 1];
                        Y[2] = s->ulti_codebook[tmp + 2];
                        Y[3] = s->ulti_codebook[tmp + 3];
                    }
                    break;

                case 3:
                    if (modifier) { // all 16 luma samples
                        uint8_t Luma[16];

                        if (bytestream2_get_bytes_left(&s->gb) < 12)
                            goto err;
                        tmp = bytestream2_get_be24u(&s->gb);
                        Luma[0] = (tmp >> 18) & 0x3F;
                        Luma[1] = (tmp >> 12) & 0x3F;
                        Luma[2] = (tmp >> 6) & 0x3F;
                        Luma[3] = tmp & 0x3F;

                        tmp = bytestream2_get_be24u(&s->gb);
                        Luma[4] = (tmp >> 18) & 0x3F;
                        Luma[5] = (tmp >> 12) & 0x3F;
                        Luma[6] = (tmp >> 6) & 0x3F;
                        Luma[7] = tmp & 0x3F;

                        tmp = bytestream2_get_be24u(&s->gb);
                        Luma[8] = (tmp >> 18) & 0x3F;
                        Luma[9] = (tmp >> 12) & 0x3F;
                        Luma[10] = (tmp >> 6) & 0x3F;
                        Luma[11] = tmp & 0x3F;

                        tmp = bytestream2_get_be24u(&s->gb);
                        Luma[12] = (tmp >> 18) & 0x3F;
                        Luma[13] = (tmp >> 12) & 0x3F;
                        Luma[14] = (tmp >> 6) & 0x3F;
                        Luma[15] = tmp & 0x3F;

                        ulti_convert_yuv(s->frame, tx, ty, Luma, chroma);
                    } else {
                        if (bytestream2_get_bytes_left(&s->gb) < 4)
                            goto err;
                        tmp = bytestream2_get_byteu(&s->gb);
                        if(tmp & 0x80) {
                            angle = (tmp >> 4) & 0x7;
                            tmp = (tmp << 8) + bytestream2_get_byteu(&s->gb);
                            Y[0] = (tmp >> 6) & 0x3F;
                            Y[1] = tmp & 0x3F;
                            Y[2] = bytestream2_get_byteu(&s->gb) & 0x3F;
                            Y[3] = bytestream2_get_byteu(&s->gb) & 0x3F;
                            ulti_grad(s->frame, tx, ty, Y, chroma, angle); //draw block
                        } else { // some patterns
                            int f0 = tmp;
                            int f1 = bytestream2_get_byteu(&s->gb);
                            Y[0] = bytestream2_get_byteu(&s->gb) & 0x3F;
                            Y[1] = bytestream2_get_byteu(&s->gb) & 0x3F;
                            ulti_pattern(s->frame, tx, ty, f0, f1, Y[0], Y[1], chroma);
                        }
                    }
                    break;
                }
                if(code != 3)
                    ulti_grad(s->frame, tx, ty, Y, chroma, angle); // draw block
            }
            blocks++;
            x += 8;
            if(x >= s->width) {
                x = 0;
                y += 8;
            }
        }
    }

    *got_frame = 1;
    if ((ret = av_frame_ref(rframe, s->frame)) < 0)
        return ret;

    return buf_size;

err:
    av_log(avctx, AV_LOG_ERROR,
           "Insufficient data\n");
    return AVERROR_INVALIDDATA;
}

const FFCodec ff_ulti_decoder = {
    .p.name         = "ultimotion",
    CODEC_LONG_NAME("IBM UltiMotion"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_ULTI,
    .priv_data_size = sizeof(UltimotionDecodeContext),
    .init           = ulti_decode_init,
    .close          = ulti_decode_end,
    FF_CODEC_DECODE_CB(ulti_decode_frame),
    .p.capabilities = AV_CODEC_CAP_DR1,
};