/*
 * libavformat API example.
 *
 * Output a media file in any supported libavformat format.
 * The default codecs are used.
 */
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>

#include "libavutil/mathematics.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"

#undef exit

/* output stream parameters */
#define STREAM_DURATION   200.0 /* seconds */
#define STREAM_FRAME_RATE 25    /* 25 images/s */
#define STREAM_NB_FRAMES  ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT    PIX_FMT_YUV420P /* default pix_fmt */

static int sws_flags = SWS_BICUBIC;

/**************************************************************/
/* audio output */

static float t, tincr, tincr2;
static int16_t *samples;
static uint8_t *audio_outbuf;
static int audio_outbuf_size;
static int audio_input_frame_size;

/*
 * add an audio output stream
 */
static AVStream *add_audio_stream(AVFormatContext *oc, enum CodecID codec_id)
{
    AVCodecContext *c;
    AVStream *st;

    st = avformat_new_stream(oc, NULL);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }
    st->id = 1;

    c = st->codec;
    c->codec_id   = codec_id;
    c->codec_type = AVMEDIA_TYPE_AUDIO;

    /* put sample parameters */
    c->sample_fmt  = AV_SAMPLE_FMT_S16;
    c->bit_rate    = 64000;
    c->sample_rate = 44100;
    c->channels    = 2;

    /* some formats want stream headers to be separate */
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}

static void open_audio(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVCodec *codec;

    c = st->codec;

    /* find the audio encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    /* init the signal generator */
    t     = 0;
    tincr = 2 * M_PI * 110.0 / c->sample_rate;
    /* increment frequency by 110 Hz per second */
    tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;

    audio_outbuf_size = 10000;
    audio_outbuf      = av_malloc(audio_outbuf_size);

    /* PCM codecs report frame_size <= 1, so derive the number of samples
       per frame from the output buffer size; halve it for 16-bit PCM,
       where each sample takes two bytes */
    if (c->frame_size <= 1) {
        audio_input_frame_size = audio_outbuf_size / c->channels;
        switch (st->codec->codec_id) {
        case CODEC_ID_PCM_S16LE:
        case CODEC_ID_PCM_S16BE:
        case CODEC_ID_PCM_U16LE:
        case CODEC_ID_PCM_U16BE:
            audio_input_frame_size >>= 1;
            break;
        default:
            break;
        }
    } else {
        audio_input_frame_size = c->frame_size;
    }
    samples = av_malloc(audio_input_frame_size * 2 * c->channels);
}

/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
   'nb_channels' channels */
static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
    int j, i, v;
    int16_t *q;

    q = samples;
    for (j = 0; j < frame_size; j++) {
        v = (int)(sin(t) * 10000);
        for (i = 0; i < nb_channels; i++)
            *q++ = v;
        t     += tincr;
        tincr += tincr2;
    }
}

static void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
    AVCodecContext *c;
    AVPacket pkt;
    av_init_packet(&pkt);

    c = st->codec;

    get_audio_frame(samples, audio_input_frame_size, c->channels);

    pkt.size = avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);

    /* rescale the codec timestamp to the stream time base */
    if (c->coded_frame && c->coded_frame->pts != AV_NOPTS_VALUE)
        pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
    pkt.flags       |= AV_PKT_FLAG_KEY;
    pkt.stream_index = st->index;
    pkt.data         = audio_outbuf;

    /* write the compressed frame in the media file */
    if (av_interleaved_write_frame(oc, &pkt) != 0) {
        fprintf(stderr, "Error while writing audio frame\n");
        exit(1);
    }
}

static void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);

    av_free(samples);
    av_free(audio_outbuf);
}

/**************************************************************/
/* video output */

static AVFrame *picture, *tmp_picture;
static uint8_t *video_outbuf;
static int frame_count, video_outbuf_size;

/* add a video output stream */
static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id)
{
    AVCodecContext *c;
    AVStream *st;
    AVCodec *codec;

    st = avformat_new_stream(oc, NULL);
    if (!st) {
        fprintf(stderr, "Could not alloc stream\n");
        exit(1);
    }

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }
    avcodec_get_context_defaults3(c, codec);

    c->codec_id = codec_id;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width  = 352;
    c->height = 288;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. For fixed-fps content,
       the time base should be 1/framerate and the timestamp increments
       should be identically 1. */
    c->time_base.den = STREAM_FRAME_RATE;
    c->time_base.num = 1;
    c->gop_size      = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt       = STREAM_PIX_FMT;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
        /* Needed to avoid using macroblocks in which some coeffs overflow.
           This does not happen with normal video, it just happens here as
           the motion of the chroma plane does not match the luma plane. */
        c->mb_decision = 2;
    }
    /* some formats want stream headers to be separate */
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    return st;
}

static AVFrame *alloc_picture(enum PixelFormat pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();
    if (!picture)
        return NULL;
    size        = avpicture_get_size(pix_fmt, width, height);
    picture_buf = av_malloc(size);
    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }
    avpicture_fill((AVPicture *)picture, picture_buf,
                   pix_fmt, width, height);
    return picture;
}

static void open_video(AVFormatContext *oc, AVStream *st)
{
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        fprintf(stderr, "codec not found\n");
        exit(1);
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate the output buffer for encoded frames; buffers passed
           into lav* can be allocated any way you prefer, as long as
           they're aligned enough for the architecture and freed
           appropriately (such as using av_free for buffers allocated
           with av_malloc) */
        video_outbuf_size = 200000;
        video_outbuf      = av_malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        fprintf(stderr, "Could not allocate picture\n");
        exit(1);
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format. */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            fprintf(stderr, "Could not allocate temporary picture\n");
            exit(1);
        }
    }
}

/* prepare a dummy image */
static void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
    int x, y, i;

    i = frame_index;

    /* Y */
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
        }
    }

    /* Cb and Cr */
    for (y = 0; y < height / 2; y++) {
        for (x = 0; x < width / 2; x++) {
            pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
            pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
        }
    }
}

static void write_video_frame(AVFormatContext *oc, AVStream *st)
{
    int out_size, ret;
    AVCodecContext *c;
    static struct SwsContext *img_convert_ctx;

    c = st->codec;

    if (frame_count >= STREAM_NB_FRAMES) {
        /* no more frames to compress. The codec has a latency of a few
           frames if using B frames, so we get the last frames by passing
           the same picture again. */
    } else {
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
               to the codec pixel format if needed */
            if (img_convert_ctx == NULL) {
                img_convert_ctx = sws_getContext(c->width, c->height,
                                                 PIX_FMT_YUV420P,
                                                 c->width, c->height,
                                                 c->pix_fmt,
                                                 sws_flags, NULL, NULL, NULL);
                if (img_convert_ctx == NULL) {
                    fprintf(stderr, "Cannot initialize the conversion context\n");
                    exit(1);
                }
            }
            fill_yuv_image(tmp_picture, frame_count, c->width, c->height);
            sws_scale(img_convert_ctx, tmp_picture->data, tmp_picture->linesize,
                      0, c->height, picture->data, picture->linesize);
        } else {
            fill_yuv_image(picture, frame_count, c->width, c->height);
        }
    }

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case: the muxer stores the AVPicture directly,
           no encoding is needed */
        AVPacket pkt;
        av_init_packet(&pkt);

        pkt.flags       |= AV_PKT_FLAG_KEY;
        pkt.stream_index = st->index;
        pkt.data         = (uint8_t *)picture;
        pkt.size         = sizeof(AVPicture);

        ret = av_interleaved_write_frame(oc, &pkt);
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);

            if (c->coded_frame->pts != AV_NOPTS_VALUE)
                pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
            if (c->coded_frame->key_frame)
                pkt.flags |= AV_PKT_FLAG_KEY;
            pkt.stream_index = st->index;
            pkt.data         = video_outbuf;
            pkt.size         = out_size;

            /* write the compressed frame in the media file */
            ret = av_interleaved_write_frame(oc, &pkt);
        } else {
            ret = 0;
        }
    }
    if (ret != 0) {
        fprintf(stderr, "Error while writing video frame\n");
        exit(1);
    }
    frame_count++;
}

static void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}

/**************************************************************/
/* media file output */

int main(int argc, char **argv)
{
    const char *filename;
    AVOutputFormat *fmt;
    AVFormatContext *oc;
    AVStream *audio_st, *video_st;
    double audio_pts, video_pts;
    int i;

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    if (argc != 2) {
        printf("usage: %s output_file\n"
               "API example program to output a media file with libavformat.\n"
               "The output format is automatically guessed according to the file extension.\n"
               "Raw images can also be output by using '%%d' in the filename\n"
               "\n", argv[0]);
        return 1;
    }

    filename = argv[1];

    /* allocate the output media context */
    avformat_alloc_output_context2(&oc, NULL, NULL, filename);
    if (!oc) {
        printf("Could not deduce output format from file extension: using MPEG.\n");
        avformat_alloc_output_context2(&oc, NULL, "mpeg", filename);
    }
    if (!oc) {
        return 1;
    }
    fmt = oc->oformat;

    /* add the audio and video streams using the default format codecs
       and initialize the codecs */
    video_st = NULL;
    audio_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st = add_video_stream(oc, fmt->video_codec);
    }
    if (fmt->audio_codec != CODEC_ID_NONE) {
        audio_st = add_audio_stream(oc, fmt->audio_codec);
    }

    av_dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);
    if (audio_st)
        open_audio(oc, audio_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&oc->pb, filename, AVIO_FLAG_WRITE) < 0) {
            fprintf(stderr, "Could not open '%s'\n", filename);
            return 1;
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);
    if (video_st)
        picture->pts = 0;
    for (;;) {
        /* compute current audio and video time */
        if (audio_st)
            audio_pts = (double)audio_st->pts.val * audio_st->time_base.num / audio_st->time_base.den;
        else
            audio_pts = 0.0;

        if (video_st)
            video_pts = (double)video_st->pts.val * video_st->time_base.num / video_st->time_base.den;
        else
            video_pts = 0.0;

        if ((!audio_st || audio_pts >= STREAM_DURATION) &&
            (!video_st || video_pts >= STREAM_DURATION))
            break;

        /* write interleaved audio and video frames */
        if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
            write_audio_frame(oc, audio_st);
        } else {
            write_video_frame(oc, video_st);
            picture->pts++;
        }
    }

    /* write the trailer, if any. The trailer must be written before the
       codec contexts opened when writing the header are closed; otherwise
       av_write_trailer() may try to use memory that was freed by
       avcodec_close(). */
    av_write_trailer(oc);

    /* close each codec */
    if (video_st)
        close_video(oc, video_st);
    if (audio_st)
        close_audio(oc, audio_st);

    /* free the streams */
    for (i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        avio_close(oc->pb);
    }

    /* free the format context */
    av_free(oc);

    return 0;
}
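
/* One possible way to build this example; the exact pkg-config module
   names, include paths and extra libraries depend on your FFmpeg
   installation, so treat this as a sketch rather than a definitive
   build line:

     gcc -o output_example output_example.c \
         $(pkg-config --cflags --libs libavformat libavcodec libswscale libavutil) -lm

   Running `./output_example test.mpg` (or .avi, .mp4, ...) then muxes
   the generated sine-wave audio and test pattern video into that file. */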