#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"

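/* v210 stores raw 10-bit 4:2:2 video, so init only validates the input:
 * the width must be even because of the horizontal chroma subsampling,
 * and input that is not flagged as 10-bit only gets a warning. */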
static av_cold int encode_init(AVCodecContext *avctx)
{
    if (avctx->width & 1) {
        av_log(avctx, AV_LOG_ERROR, "v210 needs even width\n");
        return AVERROR(EINVAL);
    }

    if (avctx->bits_per_raw_sample != 10)
        av_log(avctx, AV_LOG_WARNING, "bits per raw sample: %d != 10-bit\n",
               avctx->bits_per_raw_sample);

    avctx->coded_frame = avcodec_alloc_frame();
    if (!avctx->coded_frame)
        return AVERROR(ENOMEM);

    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;

    return 0;
}

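/* v210 packs 6 pixels (6 Y + 3 Cb + 3 Cr ten-bit samples) into four
 * little-endian 32-bit words, i.e. 16 bytes, and pads every line to a
 * multiple of 48 pixels, giving a stride of aligned_width * 8 / 3 bytes
 * (a multiple of 128). line_padding is the number of zero bytes that
 * follow the active pixels on each output line. */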
static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                        const AVFrame *pic, int *got_packet)
{
    int aligned_width = ((avctx->width + 47) / 48) * 48;
    int stride        = aligned_width * 8 / 3;
    int line_padding  = stride - ((avctx->width * 8 + 11) / 12) * 4;
    int h, w, ret;
    const uint16_t *y = (const uint16_t*)pic->data[0];
    const uint16_t *u = (const uint16_t*)pic->data[1];
    const uint16_t *v = (const uint16_t*)pic->data[2];
    PutByteContext p;

    if ((ret = ff_alloc_packet2(avctx, pkt, avctx->height * stride)) < 0)
        return ret;

    bytestream2_init_writer(&p, pkt->data, pkt->size);

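/* 10-bit code values 0-3 and 1020-1023 are reserved for timing references
 * in SDI, so every sample is clipped into the 4..1019 range. */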
#define CLIP(v) av_clip(v, 4, 1019)

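/* Pack three 10-bit samples into the low 30 bits of one little-endian
 * 32-bit word: a in bits 0-9, b in bits 10-19, c in bits 20-29. */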
#define WRITE_PIXELS(a, b, c)           \
    do {                                \
        val  = CLIP(*a++);              \
        val |= (CLIP(*b++) << 10) |     \
               (CLIP(*c++) << 20);      \
        bytestream2_put_le32u(&p, val); \
    } while (0)

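    /* The inner loop emits 6 pixels (4 output words) at a time; the 2- or
     * 4-pixel remainder of widths that are not a multiple of 6 is handled
     * afterwards (the width is known to be even from encode_init). */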
    for (h = 0; h < avctx->height; h++) {
        uint32_t val;
        for (w = 0; w < avctx->width - 5; w += 6) {
            WRITE_PIXELS(u, y, v);
            WRITE_PIXELS(y, u, y);
            WRITE_PIXELS(v, y, u);
            WRITE_PIXELS(y, v, y);
        }
        if (w < avctx->width - 1) {
            WRITE_PIXELS(u, y, v);

            val = CLIP(*y++);
            if (w == avctx->width - 2)
                bytestream2_put_le32u(&p, val);
            if (w < avctx->width - 3) {
                val |= (CLIP(*u++) << 10) | (CLIP(*y++) << 20);
                bytestream2_put_le32u(&p, val);

                val = CLIP(*v++) | (CLIP(*y++) << 10);
                bytestream2_put_le32u(&p, val);
            }
        }

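        /* Zero-fill the remainder of the line up to the aligned stride. */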
        bytestream2_set_buffer(&p, 0, line_padding);

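        /* linesize[] is in bytes and the plane pointers are uint16_t, so
         * advance by linesize / 2 samples minus what this line consumed. */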
        y += pic->linesize[0] / 2 - avctx->width;
        u += pic->linesize[1] / 2 - avctx->width / 2;
        v += pic->linesize[2] / 2 - avctx->width / 2;
    }

    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;
    return 0;
}

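/* Free the coded_frame allocated in encode_init. */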
static av_cold int encode_close(AVCodecContext *avctx)
{
    av_freep(&avctx->coded_frame);

    return 0;
}

AVCodec ff_v210_encoder = {
    .name      = "v210",
    .type      = AVMEDIA_TYPE_VIDEO,
    .id        = AV_CODEC_ID_V210,
    .init      = encode_init,
    .encode2   = encode_frame,
    .close     = encode_close,
    .pix_fmts  = (const enum PixelFormat[]){ PIX_FMT_YUV422P10, PIX_FMT_NONE },
    .long_name = NULL_IF_CONFIG_SMALL("Uncompressed 4:2:2 10-bit"),
};