39 #define PALETTE_COUNT 256 40 #define CHECK_STREAM_PTR(n) \ 41 if ((stream_ptr + n) > s->size ) { \ 42 av_log(s->avctx, AV_LOG_ERROR, " MS Video-1 warning: stream_ptr out of bounds (%d >= %d)\n", \ 43 stream_ptr + n, s->size); \ 52 const unsigned char *
buf;
89 int block_ptr, pixel_ptr;
93 int blocks_wide, blocks_high;
99 unsigned char byte_a, byte_b;
100 unsigned short flags;
102 unsigned char colors[8];
103 unsigned char *pixels = s->
frame->
data[0];
110 total_blocks = blocks_wide * blocks_high;
112 row_dec = stride + 4;
114 for (block_y = blocks_high; block_y > 0; block_y--) {
115 block_ptr = ((block_y * 4) - 1) *
stride;
116 for (block_x = blocks_wide; block_x > 0; block_x--) {
119 block_ptr += block_inc;
125 pixel_ptr = block_ptr;
129 byte_a = s->
buf[stream_ptr++];
130 byte_b = s->
buf[stream_ptr++];
133 if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0))
135 else if ((byte_b & 0xFC) == 0x84) {
137 skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
138 }
else if (byte_b < 0x80) {
140 flags = (byte_b << 8) | byte_a;
143 colors[0] = s->
buf[stream_ptr++];
144 colors[1] = s->
buf[stream_ptr++];
146 for (pixel_y = 0; pixel_y < 4; pixel_y++) {
147 for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
148 pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
149 pixel_ptr -= row_dec;
151 }
else if (byte_b >= 0x90) {
153 flags = (byte_b << 8) | byte_a;
156 memcpy(colors, &s->
buf[stream_ptr], 8);
159 for (pixel_y = 0; pixel_y < 4; pixel_y++) {
160 for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
161 pixels[pixel_ptr++] =
162 colors[((pixel_y & 0x2) << 1) +
163 (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
164 pixel_ptr -= row_dec;
170 for (pixel_y = 0; pixel_y < 4; pixel_y++) {
171 for (pixel_x = 0; pixel_x < 4; pixel_x++)
172 pixels[pixel_ptr++] = colors[0];
173 pixel_ptr -= row_dec;
177 block_ptr += block_inc;
189 int block_ptr, pixel_ptr;
191 int pixel_x, pixel_y;
192 int block_x, block_y;
193 int blocks_wide, blocks_high;
199 unsigned char byte_a, byte_b;
200 unsigned short flags;
202 unsigned short colors[8];
203 unsigned short *pixels = (
unsigned short *)s->
frame->
data[0];
210 total_blocks = blocks_wide * blocks_high;
214 for (block_y = blocks_high; block_y > 0; block_y--) {
215 block_ptr = ((block_y * 4) - 1) *
stride;
216 for (block_x = blocks_wide; block_x > 0; block_x--) {
219 block_ptr += block_inc;
225 pixel_ptr = block_ptr;
229 byte_a = s->
buf[stream_ptr++];
230 byte_b = s->
buf[stream_ptr++];
233 if ((byte_a == 0) && (byte_b == 0) && (total_blocks == 0)) {
235 }
else if ((byte_b & 0xFC) == 0x84) {
237 skip_blocks = ((byte_b - 0x84) << 8) + byte_a - 1;
238 }
else if (byte_b < 0x80) {
240 flags = (byte_b << 8) | byte_a;
248 if (colors[0] & 0x8000) {
264 for (pixel_y = 0; pixel_y < 4; pixel_y++) {
265 for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
266 pixels[pixel_ptr++] =
267 colors[((pixel_y & 0x2) << 1) +
268 (pixel_x & 0x2) + ((flags & 0x1) ^ 1)];
269 pixel_ptr -= row_dec;
273 for (pixel_y = 0; pixel_y < 4; pixel_y++) {
274 for (pixel_x = 0; pixel_x < 4; pixel_x++, flags >>= 1)
275 pixels[pixel_ptr++] = colors[(flags & 0x1) ^ 1];
276 pixel_ptr -= row_dec;
281 colors[0] = (byte_b << 8) | byte_a;
283 for (pixel_y = 0; pixel_y < 4; pixel_y++) {
284 for (pixel_x = 0; pixel_x < 4; pixel_x++)
285 pixels[pixel_ptr++] = colors[0];
286 pixel_ptr -= row_dec;
290 block_ptr += block_inc;
297 void *
data,
int *got_frame,
301 int buf_size = avpkt->
size;
309 if (buf_size < (avctx->
width/4) * (avctx->
height/4) / 512) {
static int msvideo1_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
This structure describes decoded (raw) audio or video data.
ptrdiff_t const GLvoid * data
int ff_copy_palette(void *dst, const AVPacket *src, void *logctx)
Check whether the side-data of src contains a palette of size AVPALETTE_SIZE; if so, copy it to dst and return 1; else return 0.
static av_cold int init(AVCodecContext *avctx)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_RL16
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
static av_cold int msvideo1_decode_end(AVCodecContext *avctx)
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
8 bits with AV_PIX_FMT_RGB32 palette
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
int bits_per_coded_sample
bits per sample/pixel from the demuxer (needed for huffyuv).
const unsigned char * buf
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
int ff_reget_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Identical in function to ff_get_buffer(), except it reuses the existing buffer if available...
const char * name
Name of the codec implementation.
AVCodec ff_msvideo1_decoder
static void msvideo1_decode_16bit(Msvideo1Context *s)
common internal API header
int width
picture width / height.
Libavcodec external API header.
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
main external API structure.
int palette_has_changed
Tell user application that palette has changed from previous frame.
static void msvideo1_decode_8bit(Msvideo1Context *s)
#define CHECK_STREAM_PTR(n)
#define flags(name, subs,...)
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
static av_cold int msvideo1_decode_init(AVCodecContext *avctx)
GLint GLenum GLboolean GLsizei stride
common internal api header.
#define AV_PIX_FMT_RGB555
In filter documentation, the word "frame" indicates either a video frame or a group of audio samples, as stored in an AVFrame structure. Each filter holds, for each input and each output, the list of supported formats. For video that means pixel format; for audio that means channel layout and sample format. The lists are references to shared objects: when the negotiation mechanism computes the intersection of the formats supported at each end of a link, all references to both lists are replaced with a reference to the intersection. And when a single format is eventually chosen for a link amongst the remaining ones, all references to the list are updated. That means that if a filter requires that its input and output have the same format amongst a supported list, all it has to do is use a reference to the same list of formats. query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism to try again later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references, ownership and permissions.
This structure stores compressed data.
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators...