/*
 * RemotelyAnywhere Screen Capture decoder
 *
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdio.h>
#include <string.h>

#include "libavutil/mem.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "codec_internal.h"
#include "decode.h"
#include "zlib_wrapper.h"

#include <zlib.h>

#define KBND MKTAG('K', 'B', 'N', 'D')
#define FINT MKTAG('F', 'I', 'N', 'T')
#define INIT MKTAG('I', 'N', 'I', 'T')
#define BNDL MKTAG('B', 'N', 'D', 'L')
#define KFRM MKTAG('K', 'F', 'R', 'M')
#define DLTA MKTAG('D', 'L', 'T', 'A')
#define MOUS MKTAG('M', 'O', 'U', 'S')
#define MPOS MKTAG('M', 'P', 'O', 'S')
#define MOVE MKTAG('M', 'O', 'V', 'E')
#define EMPT MKTAG('E', 'M', 'P', 'T')

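/* Note on the two internal pictures below (a reading of the code in this
 * file, not documentation of the original format): frame2 appears to hold the
 * current screen contents and is what gets copied to the output frame, while
 * frame1 holds the previous values of pixels touched by DLTA/MOVE chunks,
 * which swap data between the two. */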
typedef struct RASCContext {
    AVClass        *class;
    int             skip_cursor;
    GetByteContext  gb;
    uint8_t        *delta;
    int             delta_size;
    uint8_t        *cursor;
    int             cursor_size;
    unsigned        cursor_w;
    unsigned        cursor_h;
    unsigned        cursor_x;
    unsigned        cursor_y;
    int             stride;
    int             bpp;
    AVFrame        *frame;
    AVFrame        *frame1;
    AVFrame        *frame2;
    FFZStream       zstream;
} RASCContext;

static void clear_plane(AVCodecContext *avctx, AVFrame *frame)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst = frame->data[0];

    if (!dst)
        return;

    for (int y = 0; y < avctx->height; y++) {
        memset(dst, 0, avctx->width * s->bpp);
        dst += frame->linesize[0];
    }
}

static void copy_plane(AVCodecContext *avctx, AVFrame *src, AVFrame *dst)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *srcp = src->data[0];
    uint8_t *dstp = dst->data[0];

    for (int y = 0; y < avctx->height; y++) {
        memcpy(dstp, srcp, s->stride);
        srcp += src->linesize[0];
        dstp += dst->linesize[0];
    }
}

static int init_frames(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    int ret;

    av_frame_unref(s->frame1);
    av_frame_unref(s->frame2);
    if ((ret = ff_get_buffer(avctx, s->frame1, 0)) < 0)
        return ret;

    if ((ret = ff_get_buffer(avctx, s->frame2, 0)) < 0)
        return ret;

    clear_plane(avctx, s->frame2);
    clear_plane(avctx, s->frame1);

    return 0;
}

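/* FINT/INIT chunk: if the payload starts with the 0x65 marker it carries the
 * screen description (width, height, bit depth and, for 8 bpp, a 256-entry
 * palette); otherwise both internal frames are simply cleared. The skipped
 * byte ranges are presumably header fields this decoder does not need. */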
static int decode_fint(AVCodecContext *avctx,
                       const AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, fmt;
    int ret;

    if (bytestream2_peek_le32(gb) != 0x65) {
        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        clear_plane(avctx, s->frame2);
        clear_plane(avctx, s->frame1);
        return 0;
    }
    if (bytestream2_get_bytes_left(gb) < 72)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 8);
    w   = bytestream2_get_le32(gb);
    h   = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 30);
    fmt = bytestream2_get_le16(gb);
    bytestream2_skip(gb, 24);

    switch (fmt) {
    case  8: s->stride = FFALIGN(w, 4);
             s->bpp    = 1;
             fmt       = AV_PIX_FMT_PAL8; break;
    case 16: s->stride = w * 2;
             s->bpp    = 2;
             fmt       = AV_PIX_FMT_RGB555LE; break;
    case 32: s->stride = w * 4;
             s->bpp    = 4;
             fmt       = AV_PIX_FMT_BGR0; break;
    default: return AVERROR_INVALIDDATA;
    }

    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;
    avctx->width   = w;
    avctx->height  = h;
    avctx->pix_fmt = fmt;

    ret = init_frames(avctx);
    if (ret < 0)
        return ret;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        uint32_t *pal = (uint32_t *)s->frame2->data[1];

        for (int i = 0; i < 256; i++)
            pal[i] = bytestream2_get_le32(gb) | 0xFF000000u;
    }

    return 0;
}

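/* Inflate a zlib-compressed chunk payload into the scratch buffer s->delta.
 * The caller passes the compressed size still available in the packet and the
 * expected uncompressed size. */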
static int decode_zlib(AVCodecContext *avctx, const AVPacket *avpkt,
                       unsigned size, unsigned uncompressed_size)
{
    RASCContext *s = avctx->priv_data;
    z_stream *const zstream = &s->zstream.zstream;
    GetByteContext *gb = &s->gb;
    int zret;

    zret = inflateReset(zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    av_fast_padded_malloc(&s->delta, &s->delta_size, uncompressed_size);
    if (!s->delta)
        return AVERROR(ENOMEM);

    zstream->next_in  = avpkt->data + bytestream2_tell(gb);
    zstream->avail_in = FFMIN(size, bytestream2_get_bytes_left(gb));

    zstream->next_out  = s->delta;
    zstream->avail_out = s->delta_size;

    zret = inflate(zstream, Z_FINISH);
    if (zret != Z_STREAM_END) {
        av_log(avctx, AV_LOG_ERROR,
               "Inflate failed with return code: %d.\n", zret);
        return AVERROR_INVALIDDATA;
    }

    return 0;
}

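/* MOVE chunk: a (possibly zlib-compressed) array of 16-byte records, each
 * describing a rectangle. Reading the code below: type 2 copies the rectangle
 * from frame2 into frame1, type 1 clears it in frame2, and type 0 copies it
 * within frame2 from the position given by mov_x/mov_y, via a temporary
 * buffer so that overlapping source and destination do not interfere.
 * Out-of-bounds rectangles are skipped. */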
static int decode_move(AVCodecContext *avctx,
                       const AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext mc;
    unsigned pos, compression, nb_moves;
    unsigned uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    nb_moves = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 8);
    compression = bytestream2_get_le32(gb);

    if (nb_moves > INT32_MAX / 16 || nb_moves > avctx->width * avctx->height)
        return AVERROR_INVALIDDATA;

    uncompressed_size = 16 * nb_moves;

    if (compression == 1) {
        ret = decode_zlib(avctx, avpkt,
                          size - (bytestream2_tell(gb) - pos),
                          uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&mc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        bytestream2_init(&mc, avpkt->data + bytestream2_tell(gb),
                         size - (bytestream2_tell(gb) - pos));
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (bytestream2_get_bytes_left(&mc) < uncompressed_size)
        return AVERROR_INVALIDDATA;

    for (int i = 0; i < nb_moves; i++) {
        int type, start_x, start_y, end_x, end_y, mov_x, mov_y;
        uint8_t *e2, *b1, *b2;
        int w, h;

        type    = bytestream2_get_le16(&mc);
        start_x = bytestream2_get_le16(&mc);
        start_y = bytestream2_get_le16(&mc);
        end_x   = bytestream2_get_le16(&mc);
        end_y   = bytestream2_get_le16(&mc);
        mov_x   = bytestream2_get_le16(&mc);
        mov_y   = bytestream2_get_le16(&mc);
        bytestream2_skip(&mc, 2);

        if (start_x >= avctx->width  || start_y >= avctx->height ||
            end_x   >= avctx->width  || end_y   >= avctx->height ||
            mov_x   >= avctx->width  || mov_y   >= avctx->height) {
            continue;
        }

        if (start_x >= end_x || start_y >= end_y)
            continue;

        w = end_x - start_x;
        h = end_y - start_y;

        if (mov_x + w > avctx->width || mov_y + h > avctx->height)
            continue;

        if (!s->frame2->data[0] || !s->frame1->data[0])
            return AVERROR_INVALIDDATA;

        b1 = s->frame1->data[0] + s->frame1->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        b2 = s->frame2->data[0] + s->frame2->linesize[0] * (start_y + h - 1) + start_x * s->bpp;
        e2 = s->frame2->data[0] + s->frame2->linesize[0] * (mov_y   + h - 1) + mov_x   * s->bpp;

        if (type == 2) {
            for (int j = 0; j < h; j++) {
                memcpy(b1, b2, w * s->bpp);
                b1 -= s->frame1->linesize[0];
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 1) {
            for (int j = 0; j < h; j++) {
                memset(b2, 0, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else if (type == 0) {
            uint8_t *buffer;

            av_fast_padded_malloc(&s->delta, &s->delta_size, w * h * s->bpp);
            buffer = s->delta;
            if (!buffer)
                return AVERROR(ENOMEM);

            for (int j = 0; j < h; j++) {
                memcpy(buffer + j * w * s->bpp, e2, w * s->bpp);
                e2 -= s->frame2->linesize[0];
            }

            for (int j = 0; j < h; j++) {
                memcpy(b2, buffer + j * w * s->bpp, w * s->bpp);
                b2 -= s->frame2->linesize[0];
            }
        } else {
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

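/* Advance the DLTA run-length cursor: once cx has walked past one full row of
 * the rectangle, rewind it and move both row pointers one line up (the
 * rectangle is processed bottom-up). One run element is consumed per use. */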
#define NEXT_LINE                      \
    if (cx >= w * s->bpp) {            \
        cx = 0;                        \
        cy--;                          \
        b1 -= s->frame1->linesize[0];  \
        b2 -= s->frame2->linesize[0];  \
    }                                  \
    len--;

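/* DLTA chunk: a run-length coded update of a rectangle at (x, y) of size
 * w x h, applied bottom-up. Each run starts with an opcode and a length byte.
 * Roughly, as implemented below: opcodes 1 and 10 skip data (1 or 4 bytes per
 * element), 2 and 12 swap bytes or 32-bit words between the two internal
 * frames, and 3/4/7/13 save the old value to frame1 and write a fill value
 * into frame2, with the fill read once per run (4, 7) or once per element
 * (3, 13). Unknown opcodes abort decoding. */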
static int decode_dlta(AVCodecContext *avctx,
                       const AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    GetByteContext dc;
    unsigned uncompressed_size, pos;
    unsigned x, y, w, h;
    int ret, cx, cy, compression;
    uint8_t *b1, *b2;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);
    x = bytestream2_get_le32(gb);
    y = bytestream2_get_le32(gb);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);

    if (x >= avctx->width || y >= avctx->height ||
        w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (x + w > avctx->width || y + h > avctx->height)
        return AVERROR_INVALIDDATA;

    bytestream2_skip(gb, 4);
    compression = bytestream2_get_le32(gb);

    if (compression == 1) {
        if (w * h * s->bpp * 3 < uncompressed_size)
            return AVERROR_INVALIDDATA;
        ret = decode_zlib(avctx, avpkt, size, uncompressed_size);
        if (ret < 0)
            return ret;
        bytestream2_init(&dc, s->delta, uncompressed_size);
    } else if (compression == 0) {
        if (bytestream2_get_bytes_left(gb) < uncompressed_size)
            return AVERROR_INVALIDDATA;
        bytestream2_init(&dc, avpkt->data + bytestream2_tell(gb),
                         uncompressed_size);
    } else if (compression == 2) {
        avpriv_request_sample(avctx, "compression %d", compression);
        return AVERROR_PATCHWELCOME;
    } else {
        return AVERROR_INVALIDDATA;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    b1 = s->frame1->data[0] + s->frame1->linesize[0] * (int)(y + h - 1) + ((int)x) * s->bpp;
    b2 = s->frame2->data[0] + s->frame2->linesize[0] * (int)(y + h - 1) + ((int)x) * s->bpp;
    cx = 0, cy = h;
    while (bytestream2_get_bytes_left(&dc) > 0) {
        int type = bytestream2_get_byte(&dc);
        int len  = bytestream2_get_byte(&dc);
        unsigned fill;

        switch (type) {
        case 1:
            while (len > 0 && cy > 0) {
                cx++;
                NEXT_LINE
            }
            break;
        case 2:
            while (len > 0 && cy > 0) {
                int v0 = b1[cx];
                int v1 = b2[cx];

                b2[cx] = v0;
                b1[cx] = v1;
                cx++;
                NEXT_LINE
            }
            break;
        case 3:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_byte(&dc);
                b1[cx] = b2[cx];
                b2[cx] = fill;
                cx++;
                NEXT_LINE
            }
            break;
        case 4:
            fill = bytestream2_get_byte(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx++;
                NEXT_LINE
            }
            break;
        case 7:
            fill = bytestream2_get_le32(&dc);
            while (len > 0 && cy > 0) {
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 10:
            while (len > 0 && cy > 0) {
                cx += 4;
                NEXT_LINE
            }
            break;
        case 12:
            while (len > 0 && cy > 0) {
                unsigned v0, v1;

                v0 = AV_RL32(b2 + cx);
                v1 = AV_RL32(b1 + cx);
                AV_WL32(b2 + cx, v1);
                AV_WL32(b1 + cx, v0);
                cx += 4;
                NEXT_LINE
            }
            break;
        case 13:
            while (len > 0 && cy > 0) {
                fill = bytestream2_get_le32(&dc);
                AV_WL32(b1 + cx, AV_RL32(b2 + cx));
                AV_WL32(b2 + cx, fill);
                cx += 4;
                NEXT_LINE
            }
            break;
        default:
            avpriv_request_sample(avctx, "runlen %d", type);
            return AVERROR_INVALIDDATA;
        }
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

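/* KFRM chunk: a key frame. The payload may start with an embedded FINT block
 * (0x65 marker) and is otherwise a single zlib stream holding two complete
 * images back to back, inflated one row per inflate() call and stored
 * bottom-up: first frame2, then frame1. */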
static int decode_kfrm(AVCodecContext *avctx,
                       const AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    z_stream *const zstream = &s->zstream.zstream;
    GetByteContext *gb = &s->gb;
    uint8_t *dst;
    unsigned pos;
    int zret, ret;

    pos = bytestream2_tell(gb);
    if (bytestream2_peek_le32(gb) == 0x65) {
        ret = decode_fint(avctx, avpkt, size);
        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0])
        return AVERROR_INVALIDDATA;

    zret = inflateReset(zstream);
    if (zret != Z_OK) {
        av_log(avctx, AV_LOG_ERROR, "Inflate reset error: %d\n", zret);
        return AVERROR_EXTERNAL;
    }

    zstream->next_in  = avpkt->data + bytestream2_tell(gb);
    zstream->avail_in = bytestream2_get_bytes_left(gb);

    dst = s->frame2->data[0] + (avctx->height - 1) * s->frame2->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        zstream->next_out  = dst;
        zstream->avail_out = s->stride;

        zret = inflate(zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame2->linesize[0];
    }

    dst = s->frame1->data[0] + (avctx->height - 1) * s->frame1->linesize[0];
    for (int i = 0; i < avctx->height; i++) {
        zstream->next_out  = dst;
        zstream->avail_out = s->stride;

        zret = inflate(zstream, Z_SYNC_FLUSH);
        if (zret != Z_OK && zret != Z_STREAM_END) {
            av_log(avctx, AV_LOG_ERROR,
                   "Inflate failed with return code: %d.\n", zret);
            return AVERROR_INVALIDDATA;
        }

        dst -= s->frame1->linesize[0];
    }

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

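/* MOUS chunk: the mouse cursor image, a zlib-compressed w x h bitmap with
 * 3 bytes per pixel, kept in s->cursor until it is composited by
 * draw_cursor(). */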
static int decode_mous(AVCodecContext *avctx,
                       const AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned w, h, pos, uncompressed_size;
    int ret;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    w = bytestream2_get_le32(gb);
    h = bytestream2_get_le32(gb);
    bytestream2_skip(gb, 12);
    uncompressed_size = bytestream2_get_le32(gb);

    if (w > avctx->width || h > avctx->height)
        return AVERROR_INVALIDDATA;

    if (uncompressed_size != 3 * w * h)
        return AVERROR_INVALIDDATA;

    av_fast_padded_malloc(&s->cursor, &s->cursor_size, uncompressed_size);
    if (!s->cursor)
        return AVERROR(ENOMEM);

    ret = decode_zlib(avctx, avpkt,
                      size - (bytestream2_tell(gb) - pos),
                      uncompressed_size);
    if (ret < 0)
        return ret;
    memcpy(s->cursor, s->delta, uncompressed_size);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    s->cursor_w = w;
    s->cursor_h = h;

    return 0;
}

static int decode_mpos(AVCodecContext *avctx,
                       const AVPacket *avpkt, unsigned size)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    unsigned pos;

    pos = bytestream2_tell(gb);
    bytestream2_skip(gb, 8);
    s->cursor_x = bytestream2_get_le32(gb);
    s->cursor_y = bytestream2_get_le32(gb);

    bytestream2_skip(gb, size - (bytestream2_tell(gb) - pos));

    return 0;
}

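/* Composite the cursor onto the output frame. Cursor rows are stored
 * bottom-up; the colour of the cursor's first pixel appears to act as a
 * transparency key, and matching pixels are skipped. For PAL8 output each
 * cursor pixel is mapped to the nearest palette entry by absolute RGB
 * distance; for RGB555LE and BGR0 the pixel is converted and written
 * directly. */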
static void draw_cursor(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;
    uint8_t *dst, *pal;

    if (!s->cursor)
        return;

    if (s->cursor_x >= avctx->width || s->cursor_y >= avctx->height)
        return;

    if (s->cursor_x + s->cursor_w > avctx->width ||
        s->cursor_y + s->cursor_h > avctx->height)
        return;

    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        pal = s->frame->data[1];
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];
                int best = INT_MAX;
                int index = 0;
                int dist;

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (int)(s->cursor_y + i) + (int)(s->cursor_x + j);
                for (int k = 0; k < 256; k++) {
                    int pr = pal[k * 4 + 0];
                    int pg = pal[k * 4 + 1];
                    int pb = pal[k * 4 + 2];

                    dist = FFABS(cr - pr) + FFABS(cg - pg) + FFABS(cb - pb);
                    if (dist < best) {
                        best = dist;
                        index = k;
                    }
                }
                dst[0] = index;
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_RGB555LE) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                cr >>= 3; cg >>= 3; cb >>= 3;
                dst = s->frame->data[0] + s->frame->linesize[0] * (int)(s->cursor_y + i) + 2 * (s->cursor_x + j);
                AV_WL16(dst, cr | cg << 5 | cb << 10);
            }
        }
    } else if (avctx->pix_fmt == AV_PIX_FMT_BGR0) {
        for (int i = 0; i < s->cursor_h; i++) {
            for (int j = 0; j < s->cursor_w; j++) {
                int cr = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 0];
                int cg = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 1];
                int cb = s->cursor[3 * s->cursor_w * (s->cursor_h - i - 1) + 3 * j + 2];

                if (cr == s->cursor[0] && cg == s->cursor[1] && cb == s->cursor[2])
                    continue;

                dst = s->frame->data[0] + s->frame->linesize[0] * (int)(s->cursor_y + i) + 4 * (s->cursor_x + j);
                dst[0] = cb;
                dst[1] = cg;
                dst[2] = cr;
            }
        }
    }
}

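/* A packet is a sequence of chunks, each introduced by a 32-bit tag and a
 * 32-bit size. KBND/BNDL tags wrap the following chunk and mark the packet
 * as intra (KBND) or inter (BNDL); an EMPT packet produces no frame. After
 * all chunks are processed, frame2 plus the cursor overlay is copied to the
 * output frame. */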
static int decode_frame(AVCodecContext *avctx, AVFrame *frame,
                        int *got_frame, AVPacket *avpkt)
{
    RASCContext *s = avctx->priv_data;
    GetByteContext *gb = &s->gb;
    int ret, intra = 0;

    bytestream2_init(gb, avpkt->data, avpkt->size);

    if (bytestream2_peek_le32(gb) == EMPT)
        return avpkt->size;

    s->frame = frame;

    while (bytestream2_get_bytes_left(gb) > 0) {
        unsigned type, size = 0;

        if (bytestream2_get_bytes_left(gb) < 8)
            return AVERROR_INVALIDDATA;

        type = bytestream2_get_le32(gb);
        if (type == KBND || type == BNDL) {
            intra = type == KBND;
            type = bytestream2_get_le32(gb);
        }

        size = bytestream2_get_le32(gb);
        if (size > bytestream2_get_bytes_left(gb))
            return AVERROR_INVALIDDATA;

        switch (type) {
        case FINT:
        case INIT:
            ret = decode_fint(avctx, avpkt, size);
            break;
        case KFRM:
            ret = decode_kfrm(avctx, avpkt, size);
            break;
        case DLTA:
            ret = decode_dlta(avctx, avpkt, size);
            break;
        case MOVE:
            ret = decode_move(avctx, avpkt, size);
            break;
        case MOUS:
            ret = decode_mous(avctx, avpkt, size);
            break;
        case MPOS:
            ret = decode_mpos(avctx, avpkt, size);
            break;
        default:
            bytestream2_skip(gb, size);
            ret = 0;
        }

        if (ret < 0)
            return ret;
    }

    if (!s->frame2->data[0] || !s->frame1->data[0])
        return AVERROR_INVALIDDATA;

    if ((ret = ff_get_buffer(avctx, s->frame, 0)) < 0)
        return ret;

    copy_plane(avctx, s->frame2, s->frame);
    if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(s->frame->data[1], s->frame2->data[1], 1024);
    if (!s->skip_cursor)
        draw_cursor(avctx);

    if (intra)
        s->frame->flags |= AV_FRAME_FLAG_KEY;
    else
        s->frame->flags &= ~AV_FRAME_FLAG_KEY;
    s->frame->pict_type = intra ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;

    *got_frame = 1;

    return avpkt->size;
}

static av_cold int decode_init(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    s->frame1 = av_frame_alloc();
    s->frame2 = av_frame_alloc();
    if (!s->frame1 || !s->frame2)
        return AVERROR(ENOMEM);

    return ff_inflate_init(&s->zstream, avctx);
}

static av_cold int decode_close(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    av_freep(&s->cursor);
    s->cursor_size = 0;
    av_freep(&s->delta);
    s->delta_size = 0;
    av_frame_free(&s->frame1);
    av_frame_free(&s->frame2);
    ff_inflate_end(&s->zstream);

    return 0;
}

static void decode_flush(AVCodecContext *avctx)
{
    RASCContext *s = avctx->priv_data;

    clear_plane(avctx, s->frame1);
    clear_plane(avctx, s->frame2);
}

static const AVOption options[] = {
{ "skip_cursor", "skip the cursor", offsetof(RASCContext, skip_cursor), AV_OPT_TYPE_BOOL, {.i64 = 0 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_VIDEO_PARAM },
{ NULL },
};

static const AVClass rasc_decoder_class = {
    .class_name = "rasc decoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

const FFCodec ff_rasc_decoder = {
    .p.name         = "rasc",
    CODEC_LONG_NAME("RemotelyAnywhere Screen Capture"),
    .p.type         = AVMEDIA_TYPE_VIDEO,
    .p.id           = AV_CODEC_ID_RASC,
    .priv_data_size = sizeof(RASCContext),
    .init           = decode_init,
    .close          = decode_close,
    FF_CODEC_DECODE_CB(decode_frame),
    .flush          = decode_flush,
    .p.capabilities = AV_CODEC_CAP_DR1,
    .caps_internal  = FF_CODEC_CAP_INIT_CLEANUP,
    .p.priv_class   = &rasc_decoder_class,
};