00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00027 extern "C" {
00028 #include "libavutil/avassert.h"
00029 #include "avcodec.h"
00030 #include "internal.h"
00031 }
00032
00033 #include "libutvideo.h"
00034 #include "put_bits.h"
00035
00036 static av_cold int utvideo_encode_init(AVCodecContext *avctx)
00037 {
00038 UtVideoContext *utv = (UtVideoContext *)avctx->priv_data;
00039 UtVideoExtra *info;
00040 uint32_t flags, in_format;
00041
00042 switch (avctx->pix_fmt) {
00043 case PIX_FMT_YUV420P:
00044 in_format = UTVF_YV12;
00045 avctx->bits_per_coded_sample = 12;
00046 avctx->codec_tag = MKTAG('U', 'L', 'Y', '0');
00047 break;
00048 case PIX_FMT_YUYV422:
00049 in_format = UTVF_YUYV;
00050 avctx->bits_per_coded_sample = 16;
00051 avctx->codec_tag = MKTAG('U', 'L', 'Y', '2');
00052 break;
00053 case PIX_FMT_BGR24:
00054 in_format = UTVF_RGB24_WIN;
00055 avctx->bits_per_coded_sample = 24;
00056 avctx->codec_tag = MKTAG('U', 'L', 'R', 'G');
00057 break;
00058 case PIX_FMT_RGB32:
00059 in_format = UTVF_RGB32_WIN;
00060 avctx->bits_per_coded_sample = 32;
00061 avctx->codec_tag = MKTAG('U', 'L', 'R', 'A');
00062 break;
00063 default:
00064 return AVERROR(EINVAL);
00065 }
00066
00067
00068 if (avctx->prediction_method != 0 && avctx->prediction_method != 2) {
00069 av_log(avctx, AV_LOG_ERROR, "Invalid prediction method.\n");
00070 return AVERROR(EINVAL);
00071 }
00072
00073 flags = ((avctx->prediction_method + 1) << 8) | (avctx->thread_count - 1);
00074
00075 avctx->priv_data = utv;
00076 avctx->coded_frame = avcodec_alloc_frame();
00077
00078
00079 info = (UtVideoExtra *)av_malloc(sizeof(*info));
00080
00081 if (info == NULL) {
00082 av_log(avctx, AV_LOG_ERROR, "Could not allocate extradata buffer.\n");
00083 return AVERROR(ENOMEM);
00084 }
00085
00086
00087
00088
00089
00090 utv->buf_size = avpicture_get_size(avctx->pix_fmt,
00091 avctx->width, avctx->height);
00092 utv->buffer = (uint8_t *)av_malloc(utv->buf_size);
00093
00094 if (utv->buffer == NULL) {
00095 av_log(avctx, AV_LOG_ERROR, "Could not allocate output buffer.\n");
00096 return AVERROR(ENOMEM);
00097 }
00098
00099
00100
00101
00102
00103 utv->codec = CCodec::CreateInstance(UNFCC(avctx->codec_tag), "libavcodec");
00104
00105
00106 utv->codec->EncodeBegin(in_format, avctx->width, avctx->height,
00107 CBGROSSWIDTH_WINDOWS);
00108
00109
00110 avctx->extradata_size = utv->codec->EncodeGetExtraDataSize();
00111 utv->codec->EncodeGetExtraData(info, avctx->extradata_size, in_format,
00112 avctx->width, avctx->height,
00113 CBGROSSWIDTH_WINDOWS);
00114 avctx->extradata = (uint8_t *)info;
00115
00116
00117 utv->codec->SetState(&flags, sizeof(flags));
00118
00119 return 0;
00120 }
00121
00122 static int utvideo_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
00123 const AVFrame *pic, int *got_packet)
00124 {
00125 UtVideoContext *utv = (UtVideoContext *)avctx->priv_data;
00126 int w = avctx->width, h = avctx->height;
00127 int ret, rgb_size, i;
00128 bool keyframe;
00129 uint8_t *y, *u, *v;
00130 uint8_t *dst;
00131
00132
00133 if ((ret = ff_alloc_packet2(avctx, pkt, utv->buf_size)) < 0)
00134 return ret;
00135
00136 dst = pkt->data;
00137
00138
00139 switch (avctx->pix_fmt) {
00140 case PIX_FMT_YUV420P:
00141 y = utv->buffer;
00142 u = y + w * h;
00143 v = u + w * h / 4;
00144 for (i = 0; i < h; i++) {
00145 memcpy(y, pic->data[0] + i * pic->linesize[0], w);
00146 y += w;
00147 }
00148 for (i = 0; i < h / 2; i++) {
00149 memcpy(u, pic->data[2] + i * pic->linesize[2], w >> 1);
00150 memcpy(v, pic->data[1] + i * pic->linesize[1], w >> 1);
00151 u += w >> 1;
00152 v += w >> 1;
00153 }
00154 break;
00155 case PIX_FMT_YUYV422:
00156 for (i = 0; i < h; i++)
00157 memcpy(utv->buffer + i * (w << 1),
00158 pic->data[0] + i * pic->linesize[0], w << 1);
00159 break;
00160 case PIX_FMT_BGR24:
00161 case PIX_FMT_RGB32:
00162
00163 rgb_size = avctx->pix_fmt == PIX_FMT_BGR24 ? 3 : 4;
00164 for (i = 0; i < h; i++)
00165 memcpy(utv->buffer + (h - i - 1) * w * rgb_size,
00166 pic->data[0] + i * pic->linesize[0],
00167 w * rgb_size);
00168 break;
00169 default:
00170 return AVERROR(EINVAL);
00171 }
00172
00173
00174 pkt->size = utv->codec->EncodeFrame(dst, &keyframe, utv->buffer);
00175
00176 if (!pkt->size) {
00177 av_log(avctx, AV_LOG_ERROR, "EncodeFrame failed!\n");
00178 return AVERROR_INVALIDDATA;
00179 }
00180
00181
00182
00183
00184
00185
00186
00187 av_assert2(keyframe == true);
00188 avctx->coded_frame->key_frame = 1;
00189 avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
00190
00191 pkt->flags |= AV_PKT_FLAG_KEY;
00192 *got_packet = 1;
00193 return 0;
00194 }
00195
00196 static av_cold int utvideo_encode_close(AVCodecContext *avctx)
00197 {
00198 UtVideoContext *utv = (UtVideoContext *)avctx->priv_data;
00199
00200 av_freep(&avctx->coded_frame);
00201 av_freep(&avctx->extradata);
00202 av_freep(&utv->buffer);
00203
00204 utv->codec->EncodeEnd();
00205 CCodec::DeleteInstance(utv->codec);
00206
00207 return 0;
00208 }
00209
/* Positional aggregate initializer: entries must stay in the exact order
 * of the AVCodec declaration (designated initializers are unavailable in
 * this C++ translation unit). The unlabeled NULL/0 entries fill fields
 * this wrapper does not use — NOTE(review): labels on those slots left
 * off; verify against the AVCodec declaration in avcodec.h if reordering. */
AVCodec ff_libutvideo_encoder = {
    "libutvideo",                      /* name */
    NULL_IF_CONFIG_SMALL("Ut Video"),  /* long_name */
    AVMEDIA_TYPE_VIDEO,                /* type */
    CODEC_ID_UTVIDEO,                  /* id */
    CODEC_CAP_AUTO_THREADS | CODEC_CAP_LOSSLESS, /* capabilities */
    NULL,
    (const enum PixelFormat[]) {       /* pix_fmts: accepted input formats */
        PIX_FMT_YUV420P, PIX_FMT_YUYV422, PIX_FMT_BGR24,
        PIX_FMT_RGB32, PIX_FMT_NONE
    },
    NULL,
    NULL,
    NULL,
    0,
    NULL,
    NULL,
    sizeof(UtVideoContext),            /* priv_data_size */
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    utvideo_encode_init,               /* init */
    NULL,
    utvideo_encode_frame,              /* encode2 (AVPacket-based) */
    NULL,
    utvideo_encode_close,              /* close */
    NULL,
};