msvideo1.c
/*
 * Microsoft Video-1 Decoder
 * Copyright (c) 2003 The FFmpeg Project
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Microsoft Video-1 Decoder by Mike Melanson (melanson@pcisys.net)
 * For more information about the MS Video-1 format, visit:
 *   http://www.pcisys.net/~melanson/codecs/
 *
 */
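
/*
 * Overview (derived from the decode loops below): each frame is coded as a
 * grid of 4x4 blocks, traversed from the bottom row of blocks upward.  Every
 * block starts with a two-byte code that selects one of four modes: a skip
 * run, a 1-color fill, a 2-color pattern (one flag bit per pixel), or an
 * 8-color pattern (a color pair per 2x2 quadrant).  In the 16-bit variant
 * the 2- and 8-color modes are distinguished by the high bit of the first
 * RGB555 color word.
 */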

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "libavutil/internal.h"
#include "libavutil/intreadwrite.h"
#include "avcodec.h"
#include "internal.h"

#define PALETTE_COUNT 256
#define CHECK_STREAM_PTR(n) \
    if ((stream_ptr + n) > s->size ) { \
        av_log(s->avctx, AV_LOG_ERROR, " MS Video-1 warning: stream_ptr out of bounds (%d >= %d)\n", \
            stream_ptr + n, s->size); \
        return; \
    }

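/*
 * Note: CHECK_STREAM_PTR() returns (void) from the calling decode function
 * when fewer than n bytes remain in the input buffer, so every multi-byte
 * read below is bounds-checked before it happens.
 */
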
typedef struct Msvideo1Context {

    AVCodecContext *avctx;
    AVFrame *frame;

    const unsigned char *buf;
    int size;

    int mode_8bit;  /* if it's not 8-bit, it's 16-bit */

    uint32_t pal[256];
} Msvideo1Context;

static av_cold int msvideo1_decode_init(AVCodecContext *avctx)
{
    Msvideo1Context *s = avctx->priv_data;

    s->avctx = avctx;

    /* figure out the colorspace based on the presence of a palette */
    if (s->avctx->bits_per_coded_sample == 8) {
        s->mode_8bit = 1;
        avctx->pix_fmt = AV_PIX_FMT_PAL8;
        if (avctx->extradata_size >= AVPALETTE_SIZE)
            memcpy(s->pal, avctx->extradata, AVPALETTE_SIZE);
    } else {
        s->mode_8bit = 0;
        avctx->pix_fmt = AV_PIX_FMT_RGB555;
    }

    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);

    return 0;
}

static void msvideo1_decode_8bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int total_blocks;
    int pixel_x, pixel_y;  /* pixel width and height iterators */
    int block_x, block_y;  /* block width and height iterators */
    int blocks_wide, blocks_high;  /* width and height in 4x4 blocks */
    int block_inc;
    int row_dec;

    /* decoding parameters */
    int stream_ptr;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    int skip_blocks;
    unsigned char colors[8];
    unsigned char *pixels = s->frame->data[0];
    int stride = s->frame->linesize[0];

    stream_ptr = 0;
    skip_blocks = 0;
    blocks_wide = s->avctx->width / 4;
    blocks_high = s->avctx->height / 4;
    total_blocks = blocks_wide * blocks_high;
    block_inc = 4;
    row_dec = stride + 4;

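    /*
     * Blocks are stored bottom-up: block_ptr starts on the last pixel row of
     * the bottom block row, each 4x4 block is filled left to right, and
     * row_dec (stride + 4) steps pixel_ptr up one row after the four pixels
     * of a block row have been written.
     */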
    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;

            /* get the next two bytes in the encoded data stream */
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];

            /* check if the decode is finished */
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0))
                return;
            else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
                /* 2-color encoding */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(2);
                colors[0] = s->buf[stream_ptr++];
                colors[1] = s->buf[stream_ptr++];

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                    pixel_ptr -= row_dec;
                }
            } else if (byte_b >= 0x90) {
                /* 8-color encoding */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(8);
                memcpy(colors, &s->buf[stream_ptr], 8);
                stream_ptr += 8;

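                /*
                 * In the 8-color mode each 2x2 quadrant of the block has its
                 * own color pair: ((pixel_y & 2) << 1) + (pixel_x & 2) picks
                 * the quadrant's pair and the flag bit picks one of the two.
                 */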
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] =
                            colors[((pixel_y & 0x2) << 1) +
                                (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                    pixel_ptr -= row_dec;
                }
            } else {
                /* 1-color encoding */
                colors[0] = byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }

    /* make the palette available on the way out */
    if (s->avctx->pix_fmt == AV_PIX_FMT_PAL8)
        memcpy(s->frame->data[1], s->pal, AVPALETTE_SIZE);
}

static void msvideo1_decode_16bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int total_blocks;
    int pixel_x, pixel_y;  /* pixel width and height iterators */
    int block_x, block_y;  /* block width and height iterators */
    int blocks_wide, blocks_high;  /* width and height in 4x4 blocks */
    int block_inc;
    int row_dec;

    /* decoding parameters */
    int stream_ptr;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    int skip_blocks;
    unsigned short colors[8];
    unsigned short *pixels = (unsigned short *)s->frame->data[0];
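    /* linesize[] is in bytes; divide by 2 to get a stride in 16-bit pixels */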
    int stride = s->frame->linesize[0] / 2;

    stream_ptr = 0;
    skip_blocks = 0;
    blocks_wide = s->avctx->width / 4;
    blocks_high = s->avctx->height / 4;
    total_blocks = blocks_wide * blocks_high;
    block_inc = 4;
    row_dec = stride + 4;

    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;

            /* get the next two bytes in the encoded data stream */
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];

            /* check if the decode is finished */
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0)) {
                return;
            } else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
                /* 2- or 8-color encoding modes */
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(4);
                colors[0] = AV_RL16(&s->buf[stream_ptr]);
                stream_ptr += 2;
                colors[1] = AV_RL16(&s->buf[stream_ptr]);
                stream_ptr += 2;

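                /*
                 * RGB555 leaves the top bit of a color word unused, so the
                 * high bit of the first color distinguishes the 8-color mode
                 * (set) from the plain 2-color mode (clear).
                 */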
                if (colors[0] & 0x8000) {
                    /* 8-color encoding */
                    CHECK_STREAM_PTR(12);
                    colors[2] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[3] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[4] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[5] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[6] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;
                    colors[7] = AV_RL16(&s->buf[stream_ptr]);
                    stream_ptr += 2;

                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] =
                                colors[((pixel_y & 0x2) << 1) +
                                    (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                        pixel_ptr -= row_dec;
                    }
                } else {
                    /* 2-color encoding */
                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                        pixel_ptr -= row_dec;
                    }
                }
            } else {
                /* otherwise, it's a 1-color block */
                colors[0] = (byte_b << 8) | byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }
}

static int msvideo1_decode_frame(AVCodecContext *avctx,
                                 void *data, int *got_frame,
                                 AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size;
    Msvideo1Context *s = avctx->priv_data;
    int ret;

    s->buf = buf;
    s->size = buf_size;

    if ((ret = ff_reget_buffer(avctx, s->frame)) < 0)
        return ret;

    if (s->mode_8bit) {
        const uint8_t *pal = av_packet_get_side_data(avpkt, AV_PKT_DATA_PALETTE, NULL);

        if (pal) {
            memcpy(s->pal, pal, AVPALETTE_SIZE);
            s->frame->palette_has_changed = 1;
        }
    }

    if (s->mode_8bit)
        msvideo1_decode_8bit(s);
    else
        msvideo1_decode_16bit(s);

    if ((ret = av_frame_ref(data, s->frame)) < 0)
        return ret;

    *got_frame = 1;

    /* report that the buffer was completely consumed */
    return buf_size;
}

static av_cold int msvideo1_decode_end(AVCodecContext *avctx)
{
    Msvideo1Context *s = avctx->priv_data;

    av_frame_free(&s->frame);

    return 0;
}

AVCodec ff_msvideo1_decoder = {
    .name           = "msvideo1",
    .long_name      = NULL_IF_CONFIG_SMALL("Microsoft Video 1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MSVIDEO1,
    .priv_data_size = sizeof(Msvideo1Context),
    .init           = msvideo1_decode_init,
    .close          = msvideo1_decode_end,
    .decode         = msvideo1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};
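
The decoder above is selected through the normal libavcodec lookup by AV_CODEC_ID_MSVIDEO1. As a rough illustration (not part of msvideo1.c), the sketch below shows one way a caller might push a single MS Video-1 packet through the public API, assuming a build where avcodec_send_packet()/avcodec_receive_frame() are available (older builds also need avcodec_register_all() first). The helper name and parameters are hypothetical; width, height and bits_per_coded_sample would normally be filled in by the demuxer.

#include <libavcodec/avcodec.h>

/* Hypothetical helper, for illustration only: decode one MS Video-1 packet
 * into `out`.  bits_per_coded_sample selects the path: 8 -> PAL8,
 * anything else -> RGB555 (see msvideo1_decode_init() above). */
static int decode_msvideo1_packet(const AVPacket *pkt, AVFrame *out,
                                  int width, int height, int bits)
{
    const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_MSVIDEO1);
    AVCodecContext *ctx;
    int ret;

    if (!codec)
        return AVERROR_DECODER_NOT_FOUND;
    ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return AVERROR(ENOMEM);

    /* these values come from the container in real use */
    ctx->width                 = width;
    ctx->height                = height;
    ctx->bits_per_coded_sample = bits;

    if ((ret = avcodec_open2(ctx, codec, NULL)) < 0 ||
        (ret = avcodec_send_packet(ctx, pkt)) < 0 ||
        (ret = avcodec_receive_frame(ctx, out)) < 0) {
        avcodec_free_context(&ctx);
        return ret;
    }

    avcodec_free_context(&ctx);
    return 0;
}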