Hi all,<div>I am creating a Cocoa application. I am using the ffmpeg library.</div><div>In it I have to encode a YUV file to MP4 using the H.264 codec.</div><div>I have searched a lot but could not find an actual working solution for encoding.</div>
<div><br></div><div>These are the steps I am following:</div><div><br></div><div>1. I take the .yuv file as input and decode it into frames.</div><div>2. Convert each frame to an AVPicture.</div><div>3. Encode each AVPicture.</div>
<div>4. Write the encoded data to a file with an .mp4 extension.</div><div><br></div><div>But my mp4 file does not play in any player.</div><div>What am I missing?</div><div>Am I going in the right direction?<br><br>This is my source code:<br>
<br><br>- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {<br><br> int counter=1;<br> int videoStream,i;<br> <br> av_register_all();<br> AVFormatContext *pFormatCtx;<br> if(av_open_input_file(&pFormatCtx, "/Users/Shared/test.avi", nil,0, nil)!=0)<br>
NSLog(@"could not open file");<br> <br> if (av_find_stream_info(pFormatCtx)<0) {<br> NSLog(@"4444444444");<br> <br> }<br> dump_format(pFormatCtx, 0, "/Users/Shared/test.avi", 0);<br>
<br> <br> AVCodecContext *pCodecCtx;<br> <br> // Find the first video stream<br> videoStream=-1;<br> for(i=0; i<pFormatCtx->nb_streams; i++)<br> if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {<br>
videoStream=i;<br> break;<br> }<br> if(videoStream==-1)<br> NSLog(@"4444444444"); // Didn't find a video stream<br> <br> // Get a pointer to the codec context for the video stream<br>
pCodecCtx=pFormatCtx->streams[videoStream]->codec;<br> AVCodec *pCodec;<br> // Find the decoder for the video stream<br> pCodec=avcodec_find_decoder(pCodecCtx->codec_id);<br> if(pCodec==NULL) {<br>
fprintf(stderr, "Unsupported codec!\n");<br> NSLog(@"4444444444"); // Codec not found<br> }<br> // Open codec<br> if(avcodec_open(pCodecCtx, pCodec)<0)<br> NSLog(@"4444444444"); // Could not open codec<br>
AVFrame *pFrame;<br> <br> // Allocate video frame<br> pFrame=avcodec_alloc_frame();<br> int frameFinished;<br> AVPacket packet;<br> //=============================================================================<br>
<br> //==================================================================<br> i=0;<br> <br> //======================Lekha<br> <br> char *filename = "/Users/Shared/abc.mp4";<br> AVCodec *codec;<br>
//AVPicture *avPicture;<br> AVCodecContext *c= NULL;<br> int out_size, size, outbuf_size;<br> FILE *f;<br> AVFrame *picture;<br> uint8_t *outbuf;<br> <br> printf("Video encoding\n");<br>
<br> /* find the mpeg video encoder */<br> codec = avcodec_find_encoder(CODEC_ID_MPEG4);<br> if (!codec) {<br> fprintf(stderr, "codec not found\n");<br> exit(1);<br> }<br> <br> c= avcodec_alloc_context();<br>
picture= avcodec_alloc_frame();<br> <br> /* put sample parameters */<br> c->bit_rate = 346000;<br> /* resolution must be a multiple of two */<br> c->width = 640;<br> c->height = 480;<br> /* frames per second */<br>
c->time_base= (AVRational){1,25};<br> c->gop_size = 10; /* emit one intra frame every ten frames */<br> c->max_b_frames=1;<br> c->pix_fmt = PIX_FMT_YUV420P;<br> <br> /* open it */<br> if (avcodec_open(c, codec) < 0) {<br>
fprintf(stderr, "could not open codec\n");<br> exit(1);<br> }<br> <br> f = fopen(filename, "wb");<br> if (!f) {<br> fprintf(stderr, "could not open %s\n", filename);<br>
exit(1);<br> }<br> <br> /* alloc image and output buffer */<br> outbuf_size = 930000;<br> outbuf = malloc(outbuf_size);<br> size = c->width * c->height;<br> <br>#pragma mark -<br> AVFrame* outpic = avcodec_alloc_frame();<br>
int nbytes = avpicture_get_size(PIX_FMT_YUV420P, c->width, c->height);<br> <br> //create buffer for the output image<br> uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes);<br> <br> while(av_read_frame(pFormatCtx, &packet)>=0) {<br>
// Is this a packet from the video stream?<br> if(packet.stream_index==videoStream) {<br> // Decode video frame<br> NSLog(@"decoding start-------------");<br> avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,<br>
&packet);<br> <br> // Did we get a video frame?<br> if(frameFinished) {<br> <br> for(i=0;i<1;i++)<br> {<br>
<br> fflush(stdout);<br> <br> int numBytes = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);<br> uint8_t *buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));<br>
<br> //NSImage *image = [[NSImage alloc]initWithContentsOfFile:[NSString stringWithFormat:@"/Users/Shared/frame%d.ppm", i]];<br> <br> int outPutWidth = c->width;<br>
int outPutHeight = c->height;<br> <br> AVPicture pict;<br> avpicture_alloc(&pict, PIX_FMT_RGB24, outPutWidth, outPutHeight);<br> <br>
// Setup scaler<br> static int sws_flags = SWS_FAST_BILINEAR;<br> img_convert_ctx = sws_getContext(c->width, <br> c->height,<br>
c->pix_fmt,<br> outPutWidth, <br> outPutHeight,<br> PIX_FMT_RGB24,<br>
sws_flags, NULL, NULL, NULL);<br> <br> <br> CGImageRef newCgImage1 = [self imageFromAVPicture:(AVPicture)pict width:outPutWidth height:outPutHeight];<br>
<br> <br> NSImage *image = [[NSImage alloc] initWithCGImage:newCgImage1 size:NSMakeSize(outPutWidth, outPutHeight)];<br> CGImageSourceRef source;<br>
<br> source = CGImageSourceCreateWithData((CFDataRef)[image TIFFRepresentation], NULL);<br> CGImageRef newCgImage = CGImageSourceCreateImageAtIndex(source, 0, NULL);<br>
<br> CGDataProviderRef dataProvider = CGImageGetDataProvider(newCgImage);<br> CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);<br> buffer = (uint8_t *)CFDataGetBytePtr(bitmapData); <br>
<br> avpicture_fill((AVPicture*)picture, buffer, PIX_FMT_RGB8, c->width, c->height);<br> avpicture_fill((AVPicture*)outpic, outbuffer, PIX_FMT_YUV420P, c->width, c->height);<br>
<br> struct SwsContext* fooContext = sws_getContext(c->width, c->height, <br> PIX_FMT_RGB8, <br> pCodecCtx->width, pCodecCtx->height, <br>
PIX_FMT_YUV420P, <br> SWS_FAST_BILINEAR, NULL, NULL, NULL);<br> <br>
//perform the conversion<br>
sws_scale(fooContext, picture->data, picture->linesize, 0, c->height, outpic->data, outpic->linesize);<br> // Here is where I try to convert to YUV<br> <br>
/* encode the image */<br> out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic);<br> printf("encoding frame %3d (size=%5d)\n", i, out_size);<br>
fwrite(outbuf, 1, out_size, f);<br> <br> free(buffer);<br> buffer = NULL; <br> <br> //FILE *pFile;<br>// char szFilename[32];<br>
// int y;<br>// <br>// // Open file<br>// sprintf(szFilename, "/Users/Shared/frame%d.ppm", i);<br>// pFile=fopen(szFilename, "wb");<br>
// if(pFile==NULL)<br>// return;<br>// <br>// // Write header<br>// fprintf(pFile, "P6\n%d %d\n255\n",640,480);<br>// <br>
// // Write pixel data<br>// for(y=0; y<480; y++)<br>// fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, 640*3, pFile);<br>// <br>// // Close file<br>
// fclose(pFile);<br> <br> }<br> <br> <br> }<br> <br> <br> }<br> }<br> /* get the delayed frames */<br> //for(; out_size; i++) {<br>
// fflush(stdout);<br>// <br>// out_size = avcodec_encode_video(c, outbuf, outbuf_size, NULL);<br>// printf("write frame %3d (size=%5d)\n", i, out_size);<br>// fwrite(outbuf, 1, outbuf_size, f); <br>
// }<br> <br> /* add sequence end code to have a real mpeg file */<br> outbuf[0] = 0x00;<br> outbuf[1] = 0x00;<br> outbuf[2] = 0x01;<br> outbuf[3] = 0xb7;<br> fwrite(outbuf, 1, 4, f);<br> fclose(f);<br>
free(outbuf);<br> <br> avcodec_close(c);<br> av_free(c);<br> av_free(picture);<br> printf("\n");<br> NSLog(@"finished");<br><br> }<br><br></div><div><br></div><div>
here is my code.</div><div><br></div><div><br></div><div>If there is any sample code or tutorial related to the same topic, please help.</div><div><br></div><div>T</div><div><br></div><div><br></div><div>
<div><br clear="all"><br>-- <br>Lekha Mishra<br>New Generation Application Pvt. Ltd<br>Software Developer<br>90444149852<br>
</div></div>