#include "avcodec.h"
#include "bytestream.h"
#include "pnm.h"

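/* Shared init for all PNM-family codecs: reset the context's embedded
 * AVFrame and expose it as coded_frame. */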
static av_cold int common_init(AVCodecContext *avctx){
    PNMContext *s = avctx->priv_data;

    avcodec_get_frame_defaults((AVFrame*)&s->picture);
    avctx->coded_frame= (AVFrame*)&s->picture;

    return 0;
}

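/* Decode one PNM-family image (PBM/PGM/PPM/PAM): parse the ASCII header,
 * allocate the output frame, then copy the raw samples for the negotiated
 * pixel format, rescaling grey samples when the file's maxval is below the
 * maximum of the sample type. */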
static int pnm_decode_frame(AVCodecContext *avctx,
                            void *data, int *data_size,
                            const uint8_t *buf, int buf_size)
{
    PNMContext * const s = avctx->priv_data;
    AVFrame *picture = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int i, n, linesize, h, upgrade = 0;
    unsigned char *ptr;

    s->bytestream_start=
    s->bytestream= buf;
    s->bytestream_end= buf + buf_size;

    if(ff_pnm_decode_header(avctx, s) < 0)
        return -1;

    if(p->data[0])
        avctx->release_buffer(avctx, p);

    p->reference= 0;
    if(avctx->get_buffer(avctx, p) < 0){
        av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
        return -1;
    }
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    switch(avctx->pix_fmt) {
    default:
        return -1;
    case PIX_FMT_RGB48BE:
        n = avctx->width * 6;
        goto do_read;
    case PIX_FMT_RGB24:
        n = avctx->width * 3;
        goto do_read;
    case PIX_FMT_GRAY8:
        n = avctx->width;
        if (s->maxval < 255)
            upgrade = 1;
        goto do_read;
    case PIX_FMT_GRAY16BE:
    case PIX_FMT_GRAY16LE:
        n = avctx->width * 2;
        if (s->maxval < 65535)
            upgrade = 2;
        goto do_read;
    case PIX_FMT_MONOWHITE:
    case PIX_FMT_MONOBLACK:
        n = (avctx->width + 7) >> 3;
    do_read:
        ptr = p->data[0];
        linesize = p->linesize[0];
        if(s->bytestream + n*avctx->height > s->bytestream_end)
            return -1;
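        /* upgrade == 1: stretch 8-bit samples from [0,maxval] to [0,255]
         * with the fixed-point factor f ~= 255*128/maxval (rounded, >>7);
         * upgrade == 2: same idea for 16-bit samples scaled to [0,65535]
         * with 15 fractional bits. */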
        for(i = 0; i < avctx->height; i++) {
            if (!upgrade)
                memcpy(ptr, s->bytestream, n);
            else if (upgrade == 1) {
                unsigned int j, f = (255*128 + s->maxval/2) / s->maxval;
                for (j=0; j<n; j++)
                    ptr[j] = (s->bytestream[j] * f + 64) >> 7;
            } else if (upgrade == 2) {
                unsigned int j, v, f = (65535*32768 + s->maxval/2) / s->maxval;
                for (j=0; j<n/2; j++) {
                    v = be2me_16(((uint16_t *)s->bytestream)[j]);
                    ((uint16_t *)ptr)[j] = (v * f + 16384) >> 15;
                }
            }
            s->bytestream += n;
            ptr += linesize;
        }
        break;
    case PIX_FMT_YUV420P:
        {
            unsigned char *ptr1, *ptr2;

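            /* pgmyuv layout: a full-height luma plane, followed by height/2
             * rows that each hold a half-width U row and a half-width V row
             * side by side (a PGM of height*3/2 rows in total) */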
            n = avctx->width;
            ptr = p->data[0];
            linesize = p->linesize[0];
            if(s->bytestream + n*avctx->height*3/2 > s->bytestream_end)
                return -1;
            for(i = 0; i < avctx->height; i++) {
                memcpy(ptr, s->bytestream, n);
                s->bytestream += n;
                ptr += linesize;
            }
            ptr1 = p->data[1];
            ptr2 = p->data[2];
            n >>= 1;
            h = avctx->height >> 1;
            for(i = 0; i < h; i++) {
                memcpy(ptr1, s->bytestream, n);
                s->bytestream += n;
                memcpy(ptr2, s->bytestream, n);
                s->bytestream += n;
                ptr1 += p->linesize[1];
                ptr2 += p->linesize[2];
            }
        }
        break;
    case PIX_FMT_RGB32:
        ptr = p->data[0];
        linesize = p->linesize[0];
        if(s->bytestream + avctx->width*avctx->height*4 > s->bytestream_end)
            return -1;
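        /* PAM RGB_ALPHA input: repack the R,G,B,A byte quadruplets into
         * native 32-bit 0xAARRGGBB words as expected by PIX_FMT_RGB32 */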
        for(i = 0; i < avctx->height; i++) {
            int j, r, g, b, a;

            for(j = 0; j < avctx->width; j++) {
                r = *s->bytestream++;
                g = *s->bytestream++;
                b = *s->bytestream++;
                a = *s->bytestream++;
                ((uint32_t *)ptr)[j] = (a << 24) | (r << 16) | (g << 8) | b;
            }
            ptr += linesize;
        }
        break;
    }
    *picture= *(AVFrame*)&s->picture;
    *data_size = sizeof(AVPicture);

    return s->bytestream - s->bytestream_start;
}

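/* Encode one PBM/PGM/PPM image: emit the ASCII header (magic number,
 * dimensions, maxval where applicable), then the raw samples line by line.
 * YUV420P is written as the pgmyuv variant: a PGM whose stored height is
 * 3/2 of the picture height. */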
static int pnm_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data){
    PNMContext *s = avctx->priv_data;
    AVFrame *pict = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int i, h, h1, c, n, linesize;
    uint8_t *ptr, *ptr1, *ptr2;

    if(buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200){
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p = *pict;
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    s->bytestream_start=
    s->bytestream= outbuf;
    s->bytestream_end= outbuf+buf_size;

    h = avctx->height;
    h1 = h;
    switch(avctx->pix_fmt) {
    case PIX_FMT_MONOWHITE:
        c = '4';
        n = (avctx->width + 7) >> 3;
        break;
    case PIX_FMT_GRAY8:
        c = '5';
        n = avctx->width;
        break;
    case PIX_FMT_GRAY16BE:
        c = '5';
        n = avctx->width * 2;
        break;
    case PIX_FMT_RGB24:
        c = '6';
        n = avctx->width * 3;
        break;
    case PIX_FMT_RGB48BE:
        c = '6';
        n = avctx->width * 6;
        break;
    case PIX_FMT_YUV420P:
        c = '5';
        n = avctx->width;
        h1 = (h * 3) / 2;
        break;
    default:
        return -1;
    }
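    /* write the ASCII header: "P<magic>", the dimensions, and (for every
     * format except PBM) the maxval: 65535 for 16-bit samples, 255 otherwise */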
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P%c\n%d %d\n",
             c, avctx->width, h1);
    s->bytestream += strlen(s->bytestream);
    if (avctx->pix_fmt != PIX_FMT_MONOWHITE) {
        snprintf(s->bytestream, s->bytestream_end - s->bytestream,
                 "%d\n", (avctx->pix_fmt != PIX_FMT_GRAY16BE && avctx->pix_fmt != PIX_FMT_RGB48BE) ? 255 : 65535);
        s->bytestream += strlen(s->bytestream);
    }

    ptr = p->data[0];
    linesize = p->linesize[0];
    for(i=0;i<h;i++) {
        memcpy(s->bytestream, ptr, n);
        s->bytestream += n;
        ptr += linesize;
    }

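    /* pgmyuv: append the chroma samples below the luma plane, one half-width
     * U row followed by one half-width V row per chroma line */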
    if (avctx->pix_fmt == PIX_FMT_YUV420P) {
        h >>= 1;
        n >>= 1;
        ptr1 = p->data[1];
        ptr2 = p->data[2];
        for(i=0;i<h;i++) {
            memcpy(s->bytestream, ptr1, n);
            s->bytestream += n;
            memcpy(s->bytestream, ptr2, n);
            s->bytestream += n;
            ptr1 += p->linesize[1];
            ptr2 += p->linesize[2];
        }
    }
    return s->bytestream - s->bytestream_start;
}

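/* Encode one PAM (P7) image: write the WIDTH/HEIGHT/DEPTH/MAXVAL/TUPLTYPE
 * header, then the packed samples; RGB32 input is repacked from native
 * 0xAARRGGBB words into the R,G,B,A byte order PAM expects. */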
static int pam_encode_frame(AVCodecContext *avctx, unsigned char *outbuf, int buf_size, void *data){
    PNMContext *s = avctx->priv_data;
    AVFrame *pict = data;
    AVFrame * const p= (AVFrame*)&s->picture;
    int i, h, w, n, linesize, depth, maxval;
    const char *tuple_type;
    uint8_t *ptr;

    if(buf_size < avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height) + 200){
        av_log(avctx, AV_LOG_ERROR, "encoded frame too large\n");
        return -1;
    }

    *p = *pict;
    p->pict_type= FF_I_TYPE;
    p->key_frame= 1;

    s->bytestream_start=
    s->bytestream= outbuf;
    s->bytestream_end= outbuf+buf_size;

    h = avctx->height;
    w = avctx->width;
    switch(avctx->pix_fmt) {
    case PIX_FMT_MONOWHITE:
        n = (w + 7) >> 3;
        depth = 1;
        maxval = 1;
        tuple_type = "BLACKANDWHITE";
        break;
    case PIX_FMT_GRAY8:
        n = w;
        depth = 1;
        maxval = 255;
        tuple_type = "GRAYSCALE";
        break;
    case PIX_FMT_RGB24:
        n = w * 3;
        depth = 3;
        maxval = 255;
        tuple_type = "RGB";
        break;
    case PIX_FMT_RGB32:
        n = w * 4;
        depth = 4;
        maxval = 255;
        tuple_type = "RGB_ALPHA";
        break;
    default:
        return -1;
    }
    snprintf(s->bytestream, s->bytestream_end - s->bytestream,
             "P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\nTUPLTYPE %s\nENDHDR\n",
             w, h, depth, maxval, tuple_type);
    s->bytestream += strlen(s->bytestream);

    ptr = p->data[0];
    linesize = p->linesize[0];

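    /* PIX_FMT_RGB32 stores each pixel as a native 0xAARRGGBB word; emit the
     * low 24 bits as big-endian R,G,B followed by the alpha byte from the
     * top 8 bits */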
    if (avctx->pix_fmt == PIX_FMT_RGB32) {
        int j;
        unsigned int v;

        for(i=0;i<h;i++) {
            for(j=0;j<w;j++) {
                v = ((uint32_t *)ptr)[j];
                bytestream_put_be24(&s->bytestream, v);
                *s->bytestream++ = v >> 24;
            }
            ptr += linesize;
        }
    } else {
        for(i=0;i<h;i++) {
            memcpy(s->bytestream, ptr, n);
            s->bytestream += n;
            ptr += linesize;
        }
    }
    return s->bytestream - s->bytestream_start;
}

#if 0
static int pnm_probe(AVProbeData *pd)
{
    const char *p = pd->buf;
    if (pd->buf_size >= 8 &&
        p[0] == 'P' &&
        p[1] >= '4' && p[1] <= '6' &&
        pnm_space(p[2]) )
        return AVPROBE_SCORE_MAX - 1;
    else
        return 0;
}

static int pgmyuv_probe(AVProbeData *pd)
{
    if (match_ext(pd->filename, "pgmyuv"))
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}

static int pam_probe(AVProbeData *pd)
{
    const char *p = pd->buf;
    if (pd->buf_size >= 8 &&
        p[0] == 'P' &&
        p[1] == '7' &&
        p[2] == '\n')
        return AVPROBE_SCORE_MAX;
    else
        return 0;
}
#endif


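/* Codec table entries: every wrapper shares common_init and the generic PNM
 * decoder; they differ only in the accepted pixel formats and, for PAM, in
 * the encode callback. */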
#if CONFIG_PGM_ENCODER
AVCodec pgm_encoder = {
    "pgm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_GRAY8, PIX_FMT_GRAY16BE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PGM (Portable GrayMap) image"),
};
#endif // CONFIG_PGM_ENCODER

#if CONFIG_PGMYUV_ENCODER
AVCodec pgmyuv_encoder = {
    "pgmyuv",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PGMYUV,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PGMYUV (Portable GrayMap YUV) image"),
};
#endif // CONFIG_PGMYUV_ENCODER

#if CONFIG_PPM_ENCODER
AVCodec ppm_encoder = {
    "ppm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PPM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB48BE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PPM (Portable PixelMap) image"),
};
#endif // CONFIG_PPM_ENCODER

#if CONFIG_PBM_ENCODER
AVCodec pbm_encoder = {
    "pbm",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PBM,
    sizeof(PNMContext),
    common_init,
    pnm_encode_frame,
    NULL,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PBM (Portable BitMap) image"),
};
#endif // CONFIG_PBM_ENCODER

#if CONFIG_PAM_ENCODER
AVCodec pam_encoder = {
    "pam",
    CODEC_TYPE_VIDEO,
    CODEC_ID_PAM,
    sizeof(PNMContext),
    common_init,
    pam_encode_frame,
    NULL,
    pnm_decode_frame,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_GRAY8, PIX_FMT_MONOWHITE, PIX_FMT_NONE},
    .long_name= NULL_IF_CONFIG_SMALL("PAM (Portable AnyMap) image"),
};
#endif // CONFIG_PAM_ENCODER