FFmpeg
notchlc.c
1 /*
2  * NotchLC decoder
3  * Copyright (c) 2020 Paul B Mahol
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include <stdio.h>
23 #include <string.h>
24 
25 #define BITSTREAM_READER_LE
26 #include "avcodec.h"
27 #include "bytestream.h"
28 #include "codec_internal.h"
29 #include "decode.h"
30 #include "get_bits.h"
31 #include "lzf.h"
32 #include "thread.h"
33 
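/* Decoder context.  The offset/size fields below are read from the per-frame
 * header in decode_blocks() and, once scaled, are byte positions inside the
 * uncompressed payload. */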
34 typedef struct NotchLCContext {
35  unsigned compressed_size;
36  unsigned format;
37 
38  uint8_t *uncompressed_buffer;
39  unsigned uncompressed_size;
40 
41  uint8_t *lzf_buffer;
42  int64_t lzf_size;
43 
44  unsigned texture_size_x;
45  unsigned texture_size_y;
46  unsigned y_data_row_offsets;
47  unsigned uv_offset_data_offset;
48  unsigned y_control_data_offset;
49  unsigned a_control_word_offset;
50  unsigned y_data_offset;
51  unsigned uv_data_offset;
52  unsigned y_data_size;
53  unsigned a_data_offset;
54  unsigned uv_count_offset;
55  unsigned a_count_size;
56  unsigned data_end;
57 
58  GetByteContext gb;
59  PutByteContext pb;
60 } NotchLCContext;
61 
62 static av_cold int decode_init(AVCodecContext *avctx)
63 {
64  avctx->pix_fmt = AV_PIX_FMT_YUVA444P12;
65  avctx->color_range = AVCOL_RANGE_JPEG;
66  avctx->colorspace = AVCOL_SPC_RGB;
67  avctx->color_primaries = AVCOL_PRI_BT709;
68  avctx->color_trc = AVCOL_TRC_IEC61966_2_1;
69 
70  return 0;
71 }
72 
73 #define HISTORY_SIZE (64 * 1024)
74 
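/*
 * LZ4-style decompressor: each token's high nibble is a literal count and its
 * low nibble a match length minus 4, both extended by 0xFF continuation
 * bytes.  Matches are addressed by a 16-bit little-endian offset into a
 * 64 KiB history window that is flushed to the output whenever it fills, and
 * a zero offset terminates the stream.
 */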
75 static int lz4_decompress(AVCodecContext *avctx,
76  GetByteContext *gb,
77  PutByteContext *pb)
78 {
79  unsigned reference_pos, match_length, delta, pos = 0;
80  uint8_t history[64 * 1024];
81 
82  while (bytestream2_get_bytes_left(gb) > 0) {
83  uint8_t token = bytestream2_get_byte(gb);
84  unsigned num_literals = token >> 4;
85 
86  if (num_literals == 15) {
87  unsigned char current;
88  do {
89  current = bytestream2_get_byte(gb);
90  num_literals += current;
91  } while (current == 255);
92  }
93 
94  if (pos + num_literals < HISTORY_SIZE) {
95  bytestream2_get_buffer(gb, history + pos, num_literals);
96  pos += num_literals;
97  } else {
98  while (num_literals-- > 0) {
99  history[pos++] = bytestream2_get_byte(gb);
100  if (pos == HISTORY_SIZE) {
101  bytestream2_put_buffer(pb, history, HISTORY_SIZE);
102  pos = 0;
103  }
104  }
105  }
106 
107  if (bytestream2_get_bytes_left(gb) <= 0)
108  break;
109 
110  delta = bytestream2_get_le16(gb);
111  if (delta == 0)
112  return 0;
113  match_length = 4 + (token & 0x0F);
114  if (match_length == 4 + 0x0F) {
115  uint8_t current;
116 
117  do {
118  current = bytestream2_get_byte(gb);
119  match_length += current;
120  } while (current == 255);
121  }
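 /* The match source may lie before the start of the current window; in that
  * case it wraps around to the end of the 64 KiB history. */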
122  reference_pos = (pos >= delta) ? (pos - delta) : (HISTORY_SIZE + pos - delta);
123  if (pos + match_length < HISTORY_SIZE && reference_pos + match_length < HISTORY_SIZE) {
124  if (pos >= reference_pos + match_length || reference_pos >= pos + match_length) {
125  memcpy(history + pos, history + reference_pos, match_length);
126  pos += match_length;
127  } else {
128  while (match_length-- > 0)
129  history[pos++] = history[reference_pos++];
130  }
131  } else {
132  while (match_length-- > 0) {
133  history[pos++] = history[reference_pos++];
134  if (pos == HISTORY_SIZE) {
135  bytestream2_put_buffer(pb, history, HISTORY_SIZE);
136  pos = 0;
137  }
138  reference_pos %= HISTORY_SIZE;
139  }
140  }
141  }
142 
143  bytestream2_put_buffer(pb, history, pos);
144 
145  return bytestream2_tell_p(pb);
146 }
147 
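/*
 * Decode one uncompressed payload.  It starts with a header of little-endian
 * 32-bit words giving the texture size and the offsets/sizes of the Y, alpha
 * and UV sections.  The Y plane is coded in 4x4 blocks, the alpha plane in
 * 16x16 blocks of 4x4 sub-blocks (loosely resembling DXT5 alpha), and the
 * U/V planes in 16x16 blocks interpolated between two chroma endpoints.
 */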
148 static int decode_blocks(AVCodecContext *avctx, AVFrame *p,
149  unsigned uncompressed_size)
150 {
151  NotchLCContext *s = avctx->priv_data;
152  GetByteContext rgb, dgb, *gb = &s->gb;
153  GetBitContext bit;
154  int ylinesize, ulinesize, vlinesize, alinesize;
155  uint16_t *dsty, *dstu, *dstv, *dsta;
156  int ret;
157 
158  s->texture_size_x = bytestream2_get_le32(gb);
159  s->texture_size_y = bytestream2_get_le32(gb);
160 
161  ret = ff_set_dimensions(avctx, s->texture_size_x, s->texture_size_y);
162  if (ret < 0)
163  return ret;
164 
165  s->uv_offset_data_offset = bytestream2_get_le32(gb);
166  if (s->uv_offset_data_offset >= UINT_MAX / 4)
167  return AVERROR_INVALIDDATA;
168  s->uv_offset_data_offset *= 4;
169  if (s->uv_offset_data_offset >= uncompressed_size)
170  return AVERROR_INVALIDDATA;
171 
172  s->y_control_data_offset = bytestream2_get_le32(gb);
173  if (s->y_control_data_offset >= UINT_MAX / 4)
174  return AVERROR_INVALIDDATA;
175  s->y_control_data_offset *= 4;
176  if (s->y_control_data_offset >= uncompressed_size)
177  return AVERROR_INVALIDDATA;
178 
179  s->a_control_word_offset = bytestream2_get_le32(gb);
180  if (s->a_control_word_offset >= UINT_MAX / 4)
181  return AVERROR_INVALIDDATA;
182  s->a_control_word_offset *= 4;
183  if (s->a_control_word_offset >= uncompressed_size)
184  return AVERROR_INVALIDDATA;
185 
186  s->uv_data_offset = bytestream2_get_le32(gb);
187  if (s->uv_data_offset >= UINT_MAX / 4)
188  return AVERROR_INVALIDDATA;
189  s->uv_data_offset *= 4;
190  if (s->uv_data_offset >= uncompressed_size)
191  return AVERROR_INVALIDDATA;
192 
193  s->y_data_size = bytestream2_get_le32(gb);
194  if (s->y_data_size >= UINT_MAX / 4)
195  return AVERROR_INVALIDDATA;
196 
197  s->a_data_offset = bytestream2_get_le32(gb);
198  if (s->a_data_offset >= UINT_MAX / 4)
199  return AVERROR_INVALIDDATA;
200  s->a_data_offset *= 4;
201  if (s->a_data_offset >= uncompressed_size)
202  return AVERROR_INVALIDDATA;
203 
204  s->a_count_size = bytestream2_get_le32(gb);
205  if (s->a_count_size >= UINT_MAX / 4)
206  return AVERROR_INVALIDDATA;
207  s->a_count_size *= 4;
208  if (s->a_count_size >= uncompressed_size)
209  return AVERROR_INVALIDDATA;
210 
211  s->data_end = bytestream2_get_le32(gb);
212  if (s->data_end > uncompressed_size)
213  return AVERROR_INVALIDDATA;
214 
215  s->y_data_row_offsets = bytestream2_tell(gb);
216  if (s->data_end <= s->y_data_size)
217  return AVERROR_INVALIDDATA;
218  s->y_data_offset = s->data_end - s->y_data_size;
219  if (s->y_data_offset <= s->a_data_offset)
220  return AVERROR_INVALIDDATA;
221  s->uv_count_offset = s->y_data_offset - s->a_data_offset;
222 
223  if ((ret = ff_thread_get_buffer(avctx, p, 0)) < 0)
224  return ret;
225 
226  rgb = *gb;
227  dgb = *gb;
228  bytestream2_seek(&rgb, s->y_data_row_offsets, SEEK_SET);
229  bytestream2_seek(gb, s->y_control_data_offset, SEEK_SET);
230 
231  if (bytestream2_get_bytes_left(gb) < (avctx->height + 3) / 4 * ((avctx->width + 3) / 4) * 4)
232  return AVERROR_INVALIDDATA;
233 
234  dsty = (uint16_t *)p->data[0];
235  dsta = (uint16_t *)p->data[3];
236  ylinesize = p->linesize[0] / 2;
237  alinesize = p->linesize[3] / 2;
238 
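 /* Y plane: every 4x4 block has one 32-bit control word carrying a 12-bit
  * minimum, a 12-bit maximum and four 2-bit selectors (one per row) that
  * choose 1-4 bits per sample; each sample is reconstructed by scaling its
  * coded index into the [y_min, y_max] range. */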
239  for (int y = 0; y < avctx->height; y += 4) {
240  const unsigned row_offset = bytestream2_get_le32(&rgb);
241 
242  bytestream2_seek(&dgb, s->y_data_offset + row_offset, SEEK_SET);
243 
244  init_get_bits8(&bit, dgb.buffer + bytestream2_tell(&dgb), bytestream2_get_bytes_left(&dgb));
245  for (int x = 0; x < avctx->width; x += 4) {
246  unsigned item = bytestream2_get_le32(gb);
247  unsigned y_min = item & 4095;
248  unsigned y_max = (item >> 12) & 4095;
249  unsigned y_diff = y_max - y_min;
250  unsigned control[4];
251 
252  control[0] = (item >> 24) & 3;
253  control[1] = (item >> 26) & 3;
254  control[2] = (item >> 28) & 3;
255  control[3] = (item >> 30) & 3;
256 
257  for (int i = 0; i < 4; i++) {
258  const int nb_bits = control[i] + 1;
259  const int div = (1 << nb_bits) - 1;
260  const int add = div - 1;
261 
262  dsty[x + i * ylinesize + 0] = av_clip_uintp2(y_min + ((y_diff * get_bits(&bit, nb_bits) + add) / div), 12);
263  dsty[x + i * ylinesize + 1] = av_clip_uintp2(y_min + ((y_diff * get_bits(&bit, nb_bits) + add) / div), 12);
264  dsty[x + i * ylinesize + 2] = av_clip_uintp2(y_min + ((y_diff * get_bits(&bit, nb_bits) + add) / div), 12);
265  dsty[x + i * ylinesize + 3] = av_clip_uintp2(y_min + ((y_diff * get_bits(&bit, nb_bits) + add) / div), 12);
266  }
267  }
268 
269  dsty += 4 * ylinesize;
270  }
271 
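 /* Alpha plane: if s->uv_count_offset equals s->a_control_word_offset the
  * plane is simply filled with 4095 (fully opaque).  Otherwise each 16x16
  * block carries a 32-bit mode mask and an offset to per-4x4 data: mode 0
  * clears the sub-block, mode 1 sets it to 4095, mode 2 interpolates between
  * two 8-bit endpoints using a 3-bit code from a 64-bit control word, and any
  * other mode is invalid. */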
272  rgb = *gb;
273  dgb = *gb;
274  bytestream2_seek(gb, s->a_control_word_offset, SEEK_SET);
275  if (s->uv_count_offset == s->a_control_word_offset) {
276  for (int y = 0; y < avctx->height; y++) {
277  for (int x = 0; x < avctx->width; x++)
278  dsta[x] = 4095;
279  dsta += alinesize;
280  }
281  } else {
282  if (bytestream2_get_bytes_left(gb) < (avctx->height + 15) / 16 * ((avctx->width + 15) / 16) * 8)
283  return AVERROR_INVALIDDATA;
284 
285  for (int y = 0; y < avctx->height; y += 16) {
286  for (int x = 0; x < avctx->width; x += 16) {
287  unsigned m = bytestream2_get_le32(gb);
288  unsigned offset = bytestream2_get_le32(gb);
289  unsigned alpha0, alpha1;
290  uint64_t control;
291 
292  if (offset >= UINT_MAX / 4)
293  return AVERROR_INVALIDDATA;
294  offset = offset * 4 + s->uv_data_offset + s->a_data_offset;
295  if (offset >= s->data_end)
296  return AVERROR_INVALIDDATA;
297 
298  bytestream2_seek(&dgb, offset, SEEK_SET);
299  control = bytestream2_get_le64(&dgb);
300  alpha0 = control & 0xFF;
301  alpha1 = (control >> 8) & 0xFF;
302  control = control >> 16;
303 
304  for (int by = 0; by < 4; by++) {
305  for (int bx = 0; bx < 4; bx++) {
306  switch (m & 3) {
307  case 0:
308  for (int i = 0; i < 4; i++) {
309  for (int j = 0; j < 4; j++) {
310  dsta[x + (i + by * 4) * alinesize + bx * 4 + j] = 0;
311  }
312  }
313  break;
314  case 1:
315  for (int i = 0; i < 4; i++) {
316  for (int j = 0; j < 4; j++) {
317  dsta[x + (i + by * 4) * alinesize + bx * 4 + j] = 4095;
318  }
319  }
320  break;
321  case 2:
322  for (int i = 0; i < 4; i++) {
323  for (int j = 0; j < 4; j++) {
324  dsta[x + (i + by * 4) * alinesize + bx * 4 + j] = (alpha0 + (alpha1 - alpha0) * (control & 7)) << 4;
325  }
326  }
327  break;
328  default:
329  return AVERROR_INVALIDDATA;
330  }
331 
332  control >>= 3;
333  m >>= 2;
334  }
335  }
336  }
337 
338  dsta += 16 * alinesize;
339  }
340  }
341 
342  bytestream2_seek(&rgb, s->uv_offset_data_offset, SEEK_SET);
343 
344  dstu = (uint16_t *)p->data[1];
345  dstv = (uint16_t *)p->data[2];
346  ulinesize = p->linesize[1] / 2;
347  vlinesize = p->linesize[2] / 2;
348 
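 /* U/V planes: each 16x16 block points at its own chroma data.  A block is
  * either coded as a whole (escape == 0 && is8x8 == 0), as 8x8 quadrants
  * flagged in the is8x8 bitmask, or as 4x4 quarters when escape is set; in
  * every case two (U, V) endpoint pairs are expanded from 8 to 12 bits and
  * blended with 2-bit weights taken from a 32-bit location word. */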
349  for (int y = 0; y < avctx->height; y += 16) {
350  for (int x = 0; x < avctx->width; x += 16) {
351  unsigned offset = bytestream2_get_le32(&rgb) * 4;
352  int u[16][16] = { 0 }, v[16][16] = { 0 };
353  int u0, v0, u1, v1, udif, vdif;
354  unsigned escape, is8x8, loc;
355 
356  bytestream2_seek(&dgb, s->uv_data_offset + offset, SEEK_SET);
357 
358  is8x8 = bytestream2_get_le16(&dgb);
359  escape = bytestream2_get_le16(&dgb);
360 
361  if (escape == 0 && is8x8 == 0) {
362  u0 = bytestream2_get_byte(&dgb);
363  v0 = bytestream2_get_byte(&dgb);
364  u1 = bytestream2_get_byte(&dgb);
365  v1 = bytestream2_get_byte(&dgb);
366  loc = bytestream2_get_le32(&dgb);
367  u0 = (u0 << 4) | (u0 & 0xF);
368  v0 = (v0 << 4) | (v0 & 0xF);
369  u1 = (u1 << 4) | (u1 & 0xF);
370  v1 = (v1 << 4) | (v1 & 0xF);
371  udif = u1 - u0;
372  vdif = v1 - v0;
373 
374  for (int i = 0; i < 16; i += 4) {
375  for (int j = 0; j < 16; j += 4) {
376  for (int ii = 0; ii < 4; ii++) {
377  for (int jj = 0; jj < 4; jj++) {
378  u[i + ii][j + jj] = u0 + ((udif * (int)(loc & 3) + 2) / 3);
379  v[i + ii][j + jj] = v0 + ((vdif * (int)(loc & 3) + 2) / 3);
380  }
381  }
382 
383  loc >>= 2;
384  }
385  }
386  } else {
387  for (int i = 0; i < 16; i += 8) {
388  for (int j = 0; j < 16; j += 8) {
389  if (is8x8 & 1) {
390  u0 = bytestream2_get_byte(&dgb);
391  v0 = bytestream2_get_byte(&dgb);
392  u1 = bytestream2_get_byte(&dgb);
393  v1 = bytestream2_get_byte(&dgb);
394  loc = bytestream2_get_le32(&dgb);
395  u0 = (u0 << 4) | (u0 & 0xF);
396  v0 = (v0 << 4) | (v0 & 0xF);
397  u1 = (u1 << 4) | (u1 & 0xF);
398  v1 = (v1 << 4) | (v1 & 0xF);
399  udif = u1 - u0;
400  vdif = v1 - v0;
401 
402  for (int ii = 0; ii < 8; ii += 2) {
403  for (int jj = 0; jj < 8; jj += 2) {
404  for (int iii = 0; iii < 2; iii++) {
405  for (int jjj = 0; jjj < 2; jjj++) {
406  u[i + ii + iii][j + jj + jjj] = u0 + ((udif * (int)(loc & 3) + 2) / 3);
407  v[i + ii + iii][j + jj + jjj] = v0 + ((vdif * (int)(loc & 3) + 2) / 3);
408  }
409  }
410 
411  loc >>= 2;
412  }
413  }
414  } else if (escape) {
415  for (int ii = 0; ii < 8; ii += 4) {
416  for (int jj = 0; jj < 8; jj += 4) {
417  u0 = bytestream2_get_byte(&dgb);
418  v0 = bytestream2_get_byte(&dgb);
419  u1 = bytestream2_get_byte(&dgb);
420  v1 = bytestream2_get_byte(&dgb);
421  loc = bytestream2_get_le32(&dgb);
422  u0 = (u0 << 4) | (u0 & 0xF);
423  v0 = (v0 << 4) | (v0 & 0xF);
424  u1 = (u1 << 4) | (u1 & 0xF);
425  v1 = (v1 << 4) | (v1 & 0xF);
426  udif = u1 - u0;
427  vdif = v1 - v0;
428 
429  for (int iii = 0; iii < 4; iii++) {
430  for (int jjj = 0; jjj < 4; jjj++) {
431  u[i + ii + iii][j + jj + jjj] = u0 + ((udif * (int)(loc & 3) + 2) / 3);
432  v[i + ii + iii][j + jj + jjj] = v0 + ((vdif * (int)(loc & 3) + 2) / 3);
433 
434  loc >>= 2;
435  }
436  }
437  }
438  }
439  }
440 
441  is8x8 >>= 1;
442  }
443  }
444  }
445 
446  for (int i = 0; i < 16; i++) {
447  for (int j = 0; j < 16; j++) {
448  dstu[x + i * ulinesize + j] = u[i][j];
449  dstv[x + i * vlinesize + j] = v[i][j];
450  }
451  }
452  }
453 
454  dstu += 16 * ulinesize;
455  dstv += 16 * vlinesize;
456  }
457 
458  return 0;
459 }
460 
461 static int decode_frame(AVCodecContext *avctx, AVFrame *p,
462  int *got_frame, AVPacket *avpkt)
463 {
464  NotchLCContext *s = avctx->priv_data;
465  GetByteContext *gb = &s->gb;
466  PutByteContext *pb = &s->pb;
467  unsigned uncompressed_size;
468  int ret;
469 
470  if (avpkt->size <= 40)
471  return AVERROR_INVALIDDATA;
472 
473  bytestream2_init(gb, avpkt->data, avpkt->size);
474 
475  if (bytestream2_get_le32(gb) != MKBETAG('N','L','C','1'))
476  return AVERROR_INVALIDDATA;
477 
478  uncompressed_size = bytestream2_get_le32(gb);
479  s->compressed_size = bytestream2_get_le32(gb);
480  s->format = bytestream2_get_le32(gb);
481 
482  if (s->format > 2)
483  return AVERROR_PATCHWELCOME;
484 
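 /* format 0: the payload is LZF-compressed; format 1: the LZ4-style scheme
  * implemented above.  Any other accepted value is handed to decode_blocks()
  * without decompression. */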
485  if (s->format == 0) {
486  ret = ff_lzf_uncompress(gb, &s->lzf_buffer, &s->lzf_size);
487  if (ret < 0)
488  return ret;
489 
490  if (uncompressed_size > s->lzf_size)
491  return AVERROR_INVALIDDATA;
492 
493  bytestream2_init(gb, s->lzf_buffer, uncompressed_size);
494  } else if (s->format == 1) {
495  if (bytestream2_get_bytes_left(gb) < uncompressed_size / 255)
496  return AVERROR_INVALIDDATA;
497 
498  av_fast_padded_malloc(&s->uncompressed_buffer, &s->uncompressed_size,
499  uncompressed_size);
500  if (!s->uncompressed_buffer)
501  return AVERROR(ENOMEM);
502 
503  bytestream2_init_writer(pb, s->uncompressed_buffer, s->uncompressed_size);
504 
505  ret = lz4_decompress(avctx, gb, pb);
506  if (ret != uncompressed_size)
507  return AVERROR_INVALIDDATA;
508 
509  bytestream2_init(gb, s->uncompressed_buffer, uncompressed_size);
510  }
511 
512  ret = decode_blocks(avctx, p, uncompressed_size);
513  if (ret < 0)
514  return ret;
515 
516  p->pict_type = AV_PICTURE_TYPE_I;
517  p->key_frame = 1;
518 
519  *got_frame = 1;
520 
521  return avpkt->size;
522 }
523 
524 static av_cold int decode_end(AVCodecContext *avctx)
525 {
526  NotchLCContext *s = avctx->priv_data;
527 
528  av_freep(&s->uncompressed_buffer);
529  s->uncompressed_size = 0;
530  av_freep(&s->lzf_buffer);
531  s->lzf_size = 0;
532 
533  return 0;
534 }
535 
536 const FFCodec ff_notchlc_decoder = {
537  .p.name = "notchlc",
538  CODEC_LONG_NAME("NotchLC"),
539  .p.type = AVMEDIA_TYPE_VIDEO,
540  .p.id = AV_CODEC_ID_NOTCHLC,
541  .priv_data_size = sizeof(NotchLCContext),
542  .init = decode_init,
543  .close = decode_end,
544  FF_CODEC_DECODE_CB(decode_frame),
545  .p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
546 };
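
A minimal usage sketch (not part of notchlc.c) showing how the decoder registered above might be driven through the public libavcodec/libavformat API; the container handling and the printf reporting are illustrative assumptions.

#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>

int main(int argc, char **argv)
{
    AVFormatContext *fmt = NULL;
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_NOTCHLC);
    AVCodecContext *dec = avcodec_alloc_context3(codec);
    AVPacket *pkt = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    int stream_index;

    if (argc < 2 || !codec || !dec || !pkt || !frame)
        return 1;

    /* Open whatever container carries the NotchLC stream. */
    if (avformat_open_input(&fmt, argv[1], NULL, NULL) < 0 ||
        avformat_find_stream_info(fmt, NULL) < 0)
        return 1;

    stream_index = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (stream_index < 0)
        return 1;

    avcodec_parameters_to_context(dec, fmt->streams[stream_index]->codecpar);
    if (avcodec_open2(dec, codec, NULL) < 0)
        return 1;

    /* Standard send/receive decode loop; frames come out as YUVA444P12. */
    while (av_read_frame(fmt, pkt) >= 0) {
        if (pkt->stream_index == stream_index &&
            avcodec_send_packet(dec, pkt) >= 0) {
            while (avcodec_receive_frame(dec, frame) >= 0)
                printf("decoded %dx%d frame\n", frame->width, frame->height);
        }
        av_packet_unref(pkt);
    }

    av_frame_free(&frame);
    av_packet_free(&pkt);
    avcodec_free_context(&dec);
    avformat_close_input(&fmt);
    return 0;
}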