FFmpeg
msvideo1.c
/*
 * Microsoft Video-1 Decoder
 * Copyright (C) 2003 The FFmpeg project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Microsoft Video-1 Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the MS Video-1 format, visit:
 *   http://www.pcisys.net/~melanson/codecs/
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h"
#include "avcodec.h"
#include "internal.h"

#define PALETTE_COUNT 256
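/* Bail out of the current frame if reading n more bytes would run past the
 * end of the packet buffer. */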
#define CHECK_STREAM_PTR(n) \
    if ((stream_ptr + n) > s->size ) { \
        av_log(s->avctx, AV_LOG_ERROR, " MS Video-1 warning: stream_ptr out of bounds (%d >= %d)\n", \
            stream_ptr + n, s->size); \
        return; \
    }

typedef struct Msvideo1Context {

    AVCodecContext *avctx;
    AVFrame *frame;

    const unsigned char *buf;
    int size;

    int mode_8bit;  /* if it's not 8-bit, it's 16-bit */

    uint32_t pal[256];
} Msvideo1Context;

static av_cold int msvideo1_decode_init(AVCodecContext *avctx)
{
    Msvideo1Context *s = avctx->priv_data;

    s->avctx = avctx;

    if (avctx->width < 4 || avctx->height < 4)
        return AVERROR_INVALIDDATA;

    /* figure out the colorspace based on the presence of a palette */
    if (s->avctx->bits_per_coded_sample == 8) {
        s->mode_8bit = 1;
        avctx->pix_fmt = AV_PIX_FMT_PAL8;
        if (avctx->extradata_size >= AVPALETTE_SIZE)
            memcpy(s->pal, avctx->extradata, AVPALETTE_SIZE);
    } else {
        s->mode_8bit = 0;
        avctx->pix_fmt = AV_PIX_FMT_RGB555;
    }

    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);

    return 0;
}

static void msvideo1_decode_8bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int total_blocks;
    int pixel_x, pixel_y;  /* pixel width and height iterators */
    int block_x, block_y;  /* block width and height iterators */
    int blocks_wide, blocks_high;  /* width and height in 4x4 blocks */
    int block_inc;
    int row_dec;

    /* decoding parameters */
    int stream_ptr;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    int skip_blocks;
    unsigned char colors[8];
    unsigned char *pixels = s->frame->data[0];
    int stride = s->frame->linesize[0];

    stream_ptr = 0;
    skip_blocks = 0;
    blocks_wide = s->avctx->width / 4;
    blocks_high = s->avctx->height / 4;
    total_blocks = blocks_wide * blocks_high;
    block_inc = 4;
    row_dec = stride + 4;

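    /* MS Video-1 stores blocks left to right and bottom to top (DIB order):
     * block_ptr starts at the leftmost pixel of a block's bottom row, and
     * subtracting row_dec (stride + 4) after each row of 4 pixels rewinds
     * to the block's left edge and steps up one image row. */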
    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;

            /* get the next two bytes in the encoded data stream */
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];

            /* check if the decode is finished */
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0))
                return;
            else if ((byte_b & 0xFC) == 0x84) {
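                /* a skip run leaves the affected blocks untouched; they keep
                 * the previous frame's pixels preserved by ff_reget_buffer() */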
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
                /* 2-color encoding */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(2);
                colors[0] = s->buf[stream_ptr++];
                colors[1] = s->buf[stream_ptr++];

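                /* each of the 16 flag bits selects a color for one pixel,
                 * LSB first, starting at the block's bottom-left corner;
                 * a set bit selects colors[0], a clear bit colors[1] */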
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                    pixel_ptr -= row_dec;
                }
            } else if (byte_b >= 0x90) {
                /* 8-color encoding */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(8);
                memcpy(colors, &s->buf[stream_ptr], 8);
                stream_ptr += 8;

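                /* the block is split into four 2x2 quadrants, each with its
                 * own pair of colors: ((pixel_y & 2) << 1) + (pixel_x & 2)
                 * picks the pair, the flag bit picks one of the two */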
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] =
                            colors[((pixel_y & 0x2) << 1) +
                                (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                    pixel_ptr -= row_dec;
                }
            } else {
                /* 1-color encoding */
                colors[0] = byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }

    /* make the palette available on the way out */
    if (s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(s->frame->data[1], s->pal, AVPALETTE_SIZE);
}

static void msvideo1_decode_16bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int total_blocks;
    int pixel_x, pixel_y;  /* pixel width and height iterators */
    int block_x, block_y;  /* block width and height iterators */
    int blocks_wide, blocks_high;  /* width and height in 4x4 blocks */
    int block_inc;
    int row_dec;

    /* decoding parameters */
    int stream_ptr;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    int skip_blocks;
    unsigned short colors[8];
    unsigned short *pixels = (unsigned short *)s->frame->data[0];
    int stride = s->frame->linesize[0] / 2;

    stream_ptr = 0;
    skip_blocks = 0;
    blocks_wide = s->avctx->width / 4;
    blocks_high = s->avctx->height / 4;
    total_blocks = blocks_wide * blocks_high;
    block_inc = 4;
    row_dec = stride + 4;

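    /* same bottom-to-top, left-to-right block walk as the 8-bit decoder,
     * only operating on 16-bit RGB555 pixels */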
    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;

            /* get the next two bytes in the encoded data stream */
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];

            /* check if the decode is finished */
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0)) {
                return;
            } else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
                /* 2- or 8-color encoding modes */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(4);
                colors[0] = AV_RL16(&s->buf[stream_ptr]);
                stream_ptr += 2;
                colors[1] = AV_RL16(&s->buf[stream_ptr]);
                stream_ptr += 2;

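                /* the top bit of the first color doubles as the mode flag:
                 * set means 8-color (per-quadrant) encoding, clear means
                 * plain 2-color encoding */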
                if (colors[0] & 0x8000) {
                    /* 8-color encoding */
                    CHECK_STREAM_PTR(12);
                    colors[2] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[3] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[4] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[5] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[6] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[7] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;

                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] =
                                colors[((pixel_y & 0x2) << 1) +
                                    (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                        pixel_ptr -= row_dec;
                    }
                } else {
                    /* 2-color encoding */
                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                        pixel_ptr -= row_dec;
                    }
                }
            } else {
                /* otherwise, it's a 1-color block */
                colors[0] = (byte_b << 8) | byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }
}

static int msvideo1_decode_frame(AVCodecContext *avctx,
                                 void *data, int *got_frame,
                                 AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    Msvideo1Context *s = avctx->priv_data;
    int ret;

    s->buf = buf;
    s->size = buf_size;

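    /* a single two-byte skip code can cover up to 1023 blocks, so even a
     * frame made entirely of skips needs roughly one byte per 512 blocks */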
    // Discard frame if it's smaller than the minimum frame size
    if (buf_size < (avctx->width/4) * (avctx->height/4) / 512) {
        av_log(avctx, AV_LOG_ERROR, "Packet is too small\n");
        return AVERROR_INVALIDDATA;
    }

    if ((ret = ff_reget_buffer(avctx, s->frame, 0)) < 0)
        return ret;

    if (s->mode_8bit) {
        int size;
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, &size);

        if (pal && size == AVPALETTE_SIZE) {
            memcpy(s->pal, pal, AVPALETTE_SIZE);
            s->frame->palette_has_changed = 1;
        } else if (pal) {
            av_log(avctx, AV_LOG_ERROR, "Palette size %d is wrong\n", size);
        }
    }

    if (s->mode_8bit)
        msvideo1_decode_8bit(s);
    else
        msvideo1_decode_16bit(s);

    if ((ret = av_frame_ref(data, s->frame)) < 0)
        return ret;

    *got_frame = 1;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int msvideo1_decode_end(AVCodecContext *avctx)
{
    Msvideo1Context *s = avctx->priv_data;

    av_frame_free(&s->frame);

    return 0;
}

AVCodec ff_msvideo1_decoder = {
    .name           = "msvideo1",
    .long_name      = NULL_IF_CONFIG_SMALL("Microsoft Video 1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MSVIDEO1,
    .priv_data_size = sizeof(Msvideo1Context),
    .init           = msvideo1_decode_init,
    .close          = msvideo1_decode_end,
    .decode         = msvideo1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};
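
For context, here is a minimal sketch of how this decoder is driven from the caller's side through the generic libavcodec API. It assumes the send/receive-packet API of the same FFmpeg generation as this file; the setup values (320x240, 8 bits per coded sample) are illustrative only and would normally come from the demuxer's codec parameters.

#include <libavcodec/avcodec.h>

/* Decode one demuxed MS Video 1 packet into a frame (PAL8 or RGB555). */
static int decode_one_packet(AVCodecContext *dec, AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(dec, pkt);
    if (ret < 0)
        return ret;
    return avcodec_receive_frame(dec, frame);
}

/* One-time setup, e.g.:
 *   const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_MSVIDEO1);
 *   AVCodecContext *dec  = avcodec_alloc_context3(codec);
 *   dec->width  = 320;
 *   dec->height = 240;
 *   dec->bits_per_coded_sample = 8;   // palettized variant
 *   avcodec_open2(dec, codec, NULL);
 */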