#define PALETTE_COUNT 256
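/* abort the block decode if reading n more bytes would run past the end of the input buffer */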
#define CHECK_STREAM_PTR(n) \
  if ((stream_ptr + n) > s->size ) { \
    av_log(s->avctx, AV_LOG_ERROR, " MS Video-1 warning: stream_ptr out of bounds (%d >= %d)\n", \
      stream_ptr + n, s->size); \
    return; \
  }
    const unsigned char *buf;
    int block_ptr, pixel_ptr;
    int blocks_wide, blocks_high;
    unsigned char byte_a, byte_b;
    unsigned char colors[8];

    total_blocks = blocks_wide * blocks_high;
    row_dec = stride + 4;
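    /* blocks are rendered bottom-up; row_dec steps pixel_ptr back to the block's left edge one scanline up */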
    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;
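            /* fetch the next two bytes of the encoded stream */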
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];
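            /* check whether the decode is finished */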
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0))
                return;
            else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
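                /* 2-color encoding: each of the 16 flag bits selects one of two palette indices */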
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(2);
                colors[0] = s->buf[stream_ptr++];
                colors[1] = s->buf[stream_ptr++];
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                    pixel_ptr -= row_dec;
                }
            } else if (byte_b >= 0x90) {
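                /* 8-color encoding: one pair of palette indices per 2x2 quadrant of the block */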
                flags = (byte_b << 8) | byte_a;

                CHECK_STREAM_PTR(8);
                memcpy(colors, &s->buf[stream_ptr], 8);
                stream_ptr += 8;
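                /* (pixel_y & 2, pixel_x & 2) selects the quadrant's color pair; the flag bit picks within it */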
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                        pixels[pixel_ptr++] =
                            colors[((pixel_y & 0x2) << 1) +
                                   (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                    pixel_ptr -= row_dec;
                }
            } else {
                /* 1-color encoding: fill the block with a single palette index */
                colors[0] = byte_a;

                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }
}

static void msvideo1_decode_16bit(Msvideo1Context *s)
{
    int block_ptr, pixel_ptr;
    int pixel_x, pixel_y;
    int block_x, block_y;
    int blocks_wide, blocks_high;
    unsigned char byte_a, byte_b;
    unsigned short flags;
    unsigned short colors[8];

    total_blocks = blocks_wide * blocks_high;
    for (block_y = blocks_high; block_y > 0; block_y--) {
        block_ptr = ((block_y * 4) - 1) * stride;
        for (block_x = blocks_wide; block_x > 0; block_x--) {
            /* check if this block should be skipped */
            if (skip_blocks) {
                block_ptr += block_inc;
                skip_blocks--;
                total_blocks--;
                continue;
            }

            pixel_ptr = block_ptr;
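            /* fetch the next two bytes of the encoded stream */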
            CHECK_STREAM_PTR(2);
            byte_a = s->buf[stream_ptr++];
            byte_b = s->buf[stream_ptr++];
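            /* check whether the decode is finished */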
            if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0)) {
                return;
            } else if ((byte_b & 0xFC) == 0x84) {
                /* skip code, but don't count the current block */
                skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
            } else if (byte_b < 0x80) {
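                /* 2- or 8-color encoding mode; the high bit of the first color selects which */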
                flags = (byte_b << 8) | byte_a;

                if (colors[0] & 0x8000) {
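                    /* 8-color encoding: one pair of RGB555 colors per 2x2 quadrant of the block */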
                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] =
                                colors[((pixel_y & 0x2) << 1) +
                                       (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
                        pixel_ptr -= row_dec;
                    }
                } else {
                    /* 2-color encoding: each flag bit picks one of two RGB555 colors */
                    for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                        for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
                            pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
                        pixel_ptr -= row_dec;
                    }
                }
            } else {
                /* 1-color encoding: fill the block with a single RGB555 value */
                colors[0] = (byte_b << 8) | byte_a;
                for (pixel_y = 0; pixel_y < 4; pixel_y++) {
                    for (pixel_x = 0; pixel_x < 4; pixel_x++)
                        pixels[pixel_ptr++] = colors[0];
                    pixel_ptr -= row_dec;
                }
            }

            block_ptr += block_inc;
            total_blocks--;
        }
    }
}

static int msvideo1_decode_frame(AVCodecContext *avctx,
                                 void *data, int *got_frame,
                                 AVPacket *avpkt)
{
    int buf_size = avpkt->size;