<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=ISO-8859-15">
</head>
<body bgcolor="#FFFFFF" text="#000000">
Hi,<br>
<br>
I'm new to libav and to video compression in general. I'm working on Windows
7 with Visual Studio 2010 and I'm trying to simply encode a dummy image to
x264 using a current Zeranoe build (x86).<br>
<br>
First I tried the output example, which doesn't work with x264
out of the box. I put many code snippets together, and often I really
don't know what the code does - so my error might be anywhere.<br>
<br>
Sadly it's a lot of code, but the main problem is in <b>write_video_frame</b>,
where the program crashes on <b>av_interleaved_write_frame</b> (so
you may want to look at that first). Encoding works so far (as far as I can
tell - no errors are shown). Getting this far took me a lot of time, and I
already got help on the Zeranoe forum (thread:
<a class="moz-txt-link-freetext" href="http://ffmpeg.zeranoe.com/forum/viewtopic.php?f=15&t=534&p=1581#p1581">http://ffmpeg.zeranoe.com/forum/viewtopic.php?f=15&t=534&p=1581#p1581</a>).<br>
<br>
I really need ffmpeg / libav for x264 encoding in a university project, and I
don't know what's wrong or what to do.<br>
<br>
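For reference, this is roughly the per-frame encode/write sequence I think should
happen (pieced together from the examples; simplified, without error handling, and
the pts handling is only my assumption):<br>
<br>
AVPacket pkt;<br>
int got_packet = 0;<br>
av_init_packet(&pkt);<br>
pkt.data = NULL; // let the encoder allocate the output buffer<br>
pkt.size = 0;<br>
picture->pts = frame_count; // counted in codec time_base units (1/25)<br>
if (avcodec_encode_video2(c, &pkt, picture, &got_packet) == 0 && got_packet)<br>
{<br>
// rescale the packet timestamps from the codec to the stream time base<br>
if (pkt.pts != AV_NOPTS_VALUE)<br>
pkt.pts = av_rescale_q(pkt.pts, c->time_base, st->time_base);<br>
if (pkt.dts != AV_NOPTS_VALUE)<br>
pkt.dts = av_rescale_q(pkt.dts, c->time_base, st->time_base);<br>
pkt.stream_index = st->index;<br>
ret = av_interleaved_write_frame(oc, &pkt);<br>
av_free_packet(&pkt);<br>
}<br>
<br>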
<br>
<br>
#include <iostream><br>
#include <string><br>
<br>
#include <stdlib.h><br>
#include <stdio.h><br>
<br>
<br>
extern "C" {<br>
#include <avcodec.h><br>
#include <avformat.h><br>
#include <swscale.h><br>
#include <avio.h><br>
#include <opt.h><br>
#include "libavutil/imgutils.h"<br>
}<br>
<br>
#define WIDTH 800<br>
#define HEIGHT 480<br>
#define FRAME_RATE 25<br>
#define STREAM_DURATION 1 // seconds<br>
#define STREAM_NB_FRAMES ((int)(STREAM_DURATION * FRAME_RATE))<br>
#define PIXEL_FORMAT PIX_FMT_YUV420P<br>
#define BIT_RATE 400000<br>
<br>
static int sws_flags = SWS_BICUBIC;<br>
<br>
AVFrame *picture, *tmp_picture;<br>
uint8_t *video_outbuf;<br>
int frame_count, video_outbuf_size;<br>
<br>
static void closeVideo(AVFormatContext *oc, AVStream *st)<br>
{<br>
avcodec_close(st->codec);<br>
av_free(picture->data[0]);<br>
av_free(picture);<br>
if (tmp_picture)<br>
{<br>
av_free(tmp_picture->data[0]);<br>
av_free(tmp_picture);<br>
}<br>
av_free(video_outbuf);<br>
}<br>
<br>
static AVFrame *alloc_picture(enum PixelFormat pix_fmt, int width,
int height)<br>
{<br>
AVFrame *picture;<br>
uint8_t *picture_buf;<br>
int size;<br>
<br>
picture = avcodec_alloc_frame();<br>
if(!picture)<br>
return NULL;<br>
size = avpicture_get_size(pix_fmt, width, height);<br>
picture_buf = (uint8_t*)(av_malloc(size));<br>
if (!picture_buf)<br>
{<br>
av_free(picture);<br>
return NULL;<br>
}<br>
avpicture_fill((AVPicture *) picture, picture_buf, pix_fmt, width, height);<br>
return picture;<br>
}<br>
<br>
static void openVideo(AVFormatContext *oc, AVStream *st)<br>
{<br>
AVCodec *codec;<br>
AVCodecContext *c;<br>
<br>
c = st->codec;<br>
//if(c->codec_id == CODEC_ID_H264)<br>
// av_opt_set(c->priv_data, "preset", "slow", 0);<br>
<br>
codec = avcodec_find_encoder(c->codec_id);<br>
if(!codec)<br>
{<br>
std::cout << "Codec not found." << std::endl;<br>
std::cin.get();std::cin.get();exit(1);<br>
}<br>
<br>
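// note: as far as I understand this resets the context to the codec defaults,<br>
// so the settings from addVideoStream are applied again below<br>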
if(avcodec_get_context_defaults3 (c, codec) < 0)<br>
{<br>
std::cout << "Cannot get default codec context! \n"
<< std::endl;<br>
std::cin.get();<br>
exit(1);<br>
}<br>
c->bit_rate = BIT_RATE;<br>
c->width = WIDTH;<br>
c->height = HEIGHT;<br>
c->time_base.den = FRAME_RATE;<br>
c->time_base.num = 1;<br>
c->gop_size = FRAME_RATE;<br>
c->pix_fmt = PIX_FMT_YUV420P;<br>
<br>
if(oc->oformat->flags & AVFMT_GLOBALHEADER)<br>
c->flags |= CODEC_FLAG_GLOBAL_HEADER; <br>
<br>
if(avcodec_open2(c, codec, NULL) < 0)<br>
{<br>
std::cout << "Could not open codec." <<
std::endl;<br>
std::cin.get();std::cin.get();exit(1);<br>
}<br>
video_outbuf = NULL;<br>
if(!(oc->oformat->flags & AVFMT_RAWPICTURE))<br>
{<br>
video_outbuf_size = 200000;<br>
video_outbuf = (uint8_t*)(av_malloc(video_outbuf_size));<br>
}<br>
picture = alloc_picture(c->pix_fmt, c->width,
c->height);<br>
if(!picture)<br>
{<br>
std::cout << "Could not allocate picture" <<
std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
tmp_picture = NULL;<br>
if(c->pix_fmt != PIX_FMT_YUV420P)<br>
{<br>
tmp_picture = alloc_picture(PIX_FMT_YUV420P, WIDTH, HEIGHT);<br>
if(!tmp_picture)<br>
{<br>
std::cout << " Could not allocate temporary
picture" << std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
}<br>
}<br>
<br>
<br>
static AVStream* addVideoStream(AVFormatContext *context, enum
CodecID codecID)<br>
{<br>
AVCodecContext *codec;<br>
AVStream *stream;<br>
stream = av_new_stream(context, 0);<br>
if(!stream)<br>
{<br>
std::cout << "Could not alloc stream." <<
std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
<br>
codec = stream->codec;<br>
codec->codec_id = codecID;<br>
codec->codec_type = AVMEDIA_TYPE_VIDEO;<br>
<br>
// bit rate<br>
codec->bit_rate = BIT_RATE;<br>
// resolution must be a multiple of two<br>
codec->width = WIDTH;<br>
codec->height = HEIGHT;<br>
codec->time_base.den = FRAME_RATE; // stream fps<br>
codec->time_base.num = 1;<br>
codec->gop_size = FRAME_RATE; // at most one intra frame per second<br>
codec->pix_fmt = PIXEL_FORMAT;<br>
if(codec->codec_id == CODEC_ID_MPEG2VIDEO)<br>
codec->max_b_frames = 2; // for testing, B frames<br>
<br>
if(codec->codec_id == CODEC_ID_MPEG1VIDEO)<br>
codec->mb_decision = 2;<br>
<br>
if(context->oformat->flags & AVFMT_GLOBALHEADER)<br>
codec->flags |= CODEC_FLAG_GLOBAL_HEADER;<br>
<br>
return stream;<br>
}<br>
<br>
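// fill the frame with the moving YUV gradient test pattern from the output example<br>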
static void fill_yuv_image(AVFrame *pict, int frame_index, int
width, int height)<br>
{<br>
int x, y, i;<br>
i = frame_index;<br>
<br>
/* Y */<br>
for(y=0;y<height;y++) {<br>
for(x=0;x<width;x++) {<br>
pict->data[0][y * pict->linesize[0] + x] = x + y +
i * 3;<br>
}<br>
}<br>
<br>
/* Cb and Cr */<br>
for(y=0;y<height/2;y++) {<br>
for(x=0;x<width/2;x++) {<br>
pict->data[1][y * pict->linesize[1] + x] = 128 + y
+ i * 2;<br>
pict->data[2][y * pict->linesize[2] + x] = 64 + x
+ i * 5;<br>
}<br>
}<br>
}<br>
<br>
static void write_video_frame(AVFormatContext *oc, AVStream *st)<br>
{<br>
int out_size, ret;<br>
AVCodecContext *c;<br>
static struct SwsContext *img_convert_ctx;<br>
c = st->codec;<br>
<br>
if(frame_count >= STREAM_NB_FRAMES)<br>
{<br>
<br>
}<br>
else<br>
{<br>
if(c->pix_fmt != PIX_FMT_YUV420P)<br>
{<br>
if(img_convert_ctx == NULL)<br>
{<br>
img_convert_ctx = sws_getContext(WIDTH, HEIGHT,
PIX_FMT_YUV420P, WIDTH, HEIGHT,<br>
c->pix_fmt,
sws_flags, NULL, NULL, NULL);<br>
if(img_convert_ctx == NULL)<br>
{<br>
std::cout << "Cannot initialize the
conversion context" << std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
}<br>
fill_yuv_image(tmp_picture, frame_count, WIDTH, HEIGHT);<br>
sws_scale(img_convert_ctx, tmp_picture->data,
tmp_picture->linesize, 0, HEIGHT,<br>
picture->data, picture->linesize);<br>
}<br>
else<br>
{<br>
fill_yuv_image(picture, frame_count, WIDTH, HEIGHT);<br>
}<br>
}<br>
<br>
if (oc->oformat->flags & AVFMT_RAWPICTURE) {<br>
/* raw video case. The API will change slightly in the near future for that. */<br>
AVPacket pkt;<br>
av_init_packet(&pkt);<br>
<br>
pkt.flags |= AV_PKT_FLAG_KEY;<br>
pkt.stream_index= st->index;<br>
pkt.data= (uint8_t *)picture;<br>
pkt.size= sizeof(AVPicture);<br>
<br>
<br>
ret = av_interleaved_write_frame(oc, &pkt);<br>
} else {<br>
/* encode the image */<br>
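// pts is computed in 90 kHz ticks here - I am not sure this matches the<br>
// codec time_base of 1/25 set above<br>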
picture->pts = (float) frame_count *
(1000.0/(float)(FRAME_RATE)) * 90;<br>
int result, result2;<br>
AVPacket pkt;<br>
av_init_packet(&pkt);<br>
pkt.dts = AV_NOPTS_VALUE;<br>
pkt.stream_index = st->index;<br>
pkt.data = video_outbuf;<br>
pkt.size = video_outbuf_size;<br>
pkt.duration = 0;<br>
result2 = avcodec_encode_video2(c, &pkt, picture,
&result);<br>
if (result == 1 && result2 == 0) {<br>
ret = av_interleaved_write_frame(oc, &pkt); // CRASH
!!!<br>
} else {<br>
ret = 0;<br>
}<br>
}<br>
if (ret != 0) {<br>
std::cout << "Error while writing video frames"
<< std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
frame_count++;<br>
}<br>
<br>
int main(int argc, char** argv)<br>
{<br>
const char* filename = "test.h264";<br>
AVOutputFormat *outputFormat;<br>
AVFormatContext *context;<br>
AVCodecContext *codec;<br>
AVStream *videoStream;<br>
double videoPTS;<br>
<br>
// init libavcodec, register all codecs and formats<br>
av_register_all(); <br>
avcodec_register_all();<br>
// auto detect the output format from the name<br>
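// for "test.h264" this should pick the raw H.264 (Annex B) muxer<br>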
outputFormat = av_guess_format(NULL, filename, NULL);<br>
if(!outputFormat)<br>
{<br>
std::cout << "Cannot guess output format! Using mpeg!"
<< std::endl;<br>
std::cin.get();<br>
outputFormat = av_guess_format(NULL, "h263" , NULL);<br>
}<br>
if(!outputFormat)<br>
{<br>
std::cout << "Could not find suitable output format."
<< std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
<br>
context = avformat_alloc_context();<br>
if(!context)<br>
{<br>
std::cout << "Cannot allocate avformat memory."
<< std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
context->oformat = outputFormat;<br>
sprintf_s(context->filename, sizeof(context->filename),
"%s", filename);<br>
std::cout << "Is '" << context->filename <<
"' = '" << filename << "'" << std::endl;<br>
<br>
videoStream = NULL;<br>
outputFormat->audio_codec = CODEC_ID_NONE;<br>
videoStream = addVideoStream(context,
outputFormat->video_codec);<br>
<br>
/* still needed?<br>
if(av_set_parameters(context, NULL) < 0)<br>
{<br>
std::cout << "Invalid output format parameters."
<< std::endl;<br>
exit(0);<br>
}*/<br>
<br>
av_dump_format(context, 0, filename, 1);<br>
<br>
if(videoStream)<br>
openVideo(context, videoStream);<br>
<br>
if(!(outputFormat->flags & AVFMT_NOFILE))<br>
{<br>
if(avio_open(&context->pb, filename,
AVIO_FLAG_READ_WRITE) < 0)<br>
{<br>
std::cout << "Could not open " << filename
<< std::endl;<br>
std::cin.get();exit(1);<br>
}<br>
}<br>
<br>
avformat_write_header(context, 0);<br>
<br>
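// encode frames until the stream pts (converted to seconds) reaches STREAM_DURATION<br>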
while(true)<br>
{<br>
if(videoStream)<br>
videoPTS = (double) videoStream->pts.val *
videoStream->time_base.num / videoStream->time_base.den;<br>
else<br>
videoPTS = 0.;<br>
<br>
if((!videoStream || videoPTS >= STREAM_DURATION))<br>
{<br>
break;<br>
}<br>
write_video_frame(context, videoStream);<br>
}<br>
av_write_trailer(context);<br>
if(videoStream)<br>
closeVideo(context, videoStream);<br>
for(int i = 0; i < context->nb_streams; i++)<br>
{<br>
av_freep(&context->streams[i]->codec);<br>
av_freep(&context->streams[i]);<br>
}<br>
<br>
if(!(outputFormat->flags & AVFMT_NOFILE))<br>
{<br>
avio_close(context->pb);<br>
}<br>
av_free(context);<br>
std::cin.get();<br>
return 0;<br>
}<br>
</body>
</html>