This source file includes the following definitions.
- utvideo_decode_init
- utvideo_decode_frame
- utvideo_decode_close
extern "C" {
#include "avcodec.h"
}
#include "libutvideo.h"
#include "get_bits.h"
/*
 * Initialize the Ut Video SDK decoder wrapper.
 *
 * Parses the (8- or 16-byte) UtVideoExtra extradata record, maps the codec
 * FOURCC to an FFmpeg pixel format and a Ut Video SDK frame format,
 * allocates the intermediate output buffer and coded frame, and starts a
 * decoding session on a CCodec instance.
 *
 * Returns 0 on success, -1 on failure.  Since no cleanup-on-init-failure
 * capability is signalled for this decoder, every error path below releases
 * whatever it had already acquired.
 */
static av_cold int utvideo_decode_init(AVCodecContext *avctx)
{
    UtVideoContext *utv = (UtVideoContext *)avctx->priv_data;
    UtVideoExtra info;
    int format;
    int begin_ret;
    int buf_size;

    /* Extradata is either the full 16-byte UtVideoExtra record, or a
     * truncated 8-byte one carrying only version + original_format. */
    if (avctx->extradata_size != 16 && avctx->extradata_size != 8 ) {
        av_log(avctx, AV_LOG_ERROR, "Extradata size (%d) mismatch.\n", avctx->extradata_size);
        return -1;
    }

    info.version = AV_RL32(avctx->extradata);
    info.original_format = AV_RL32(avctx->extradata + 4);
    if (avctx->extradata_size == 16) {
        info.frameinfo_size = AV_RL32(avctx->extradata + 8);
        info.flags = AV_RL32(avctx->extradata + 12);
    } else {
        /* 8-byte extradata: do not read past the end of the buffer. */
        info.frameinfo_size = 0;
        info.flags = 0;
    }

    /* Map the container FOURCC to pix_fmt + SDK format.  ULH0/ULH2 are the
     * BT.709 variants and only exist in SDK versions that define UTV_BT709. */
    switch (avctx->codec_tag) {
#ifdef UTV_BT709
    case MKTAG('U', 'L', 'H', '0'):
        avctx->pix_fmt = AV_PIX_FMT_YUV420P;
        avctx->colorspace = AVCOL_SPC_BT709;
        format = UTVF_YV12;
        break;
    case MKTAG('U', 'L', 'H', '2'):
        avctx->pix_fmt = AV_PIX_FMT_YUYV422;
        avctx->colorspace = AVCOL_SPC_BT709;
        format = UTVF_YUY2;
        break;
#endif
    case MKTAG('U', 'L', 'Y', '0'):
        avctx->pix_fmt = AV_PIX_FMT_YUV420P;
        format = UTVF_YV12;
        break;
    case MKTAG('U', 'L', 'Y', '2'):
        avctx->pix_fmt = AV_PIX_FMT_YUYV422;
        format = UTVF_YUY2;
        break;
    case MKTAG('U', 'L', 'R', 'G'):
        avctx->pix_fmt = AV_PIX_FMT_BGR24;
        format = UTVF_NFCC_BGR_BU;
        break;
    case MKTAG('U', 'L', 'R', 'A'):
        avctx->pix_fmt = AV_PIX_FMT_RGB32;
        format = UTVF_NFCC_BGRA_BU;
        break;
#ifdef UTVF_UQY2
    case MKTAG('U', 'Q', 'Y', '2'):
        avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
        format = UTVF_v210;
        break;
#endif
    default:
        av_log(avctx, AV_LOG_ERROR,
               "Not a Ut Video FOURCC: %X\n", avctx->codec_tag);
        return -1;
    }

    buf_size = avpicture_get_size(avctx->pix_fmt, avctx->width, avctx->height);
    if (buf_size < 0) {
        av_log(avctx, AV_LOG_ERROR, "Invalid picture dimensions.\n");
        return -1;
    }
    utv->buf_size = buf_size;

#ifdef UTVF_UQY2
    /* The SDK emits packed v210 rows (128 bytes per 48 pixels); reserve
     * extra room for that intermediate representation. */
    if (format == UTVF_v210)
        utv->buf_size += avctx->height * ((avctx->width + 47) / 48) * 128;
#endif

    utv->buffer = (uint8_t *)av_malloc(utv->buf_size * sizeof(uint8_t));
    if (utv->buffer == NULL) {
        av_log(avctx, AV_LOG_ERROR, "Unable to allocate output buffer.\n");
        return -1;
    }

    avctx->coded_frame = av_frame_alloc();
    if (avctx->coded_frame == NULL) {
        av_log(avctx, AV_LOG_ERROR, "Unable to allocate coded frame.\n");
        av_freep(&utv->buffer);
        return -1;
    }

    /* 10 bits for the v210-backed YUV422P10 path, 8 bits otherwise. */
    avctx->bits_per_raw_sample = avctx->pix_fmt == AV_PIX_FMT_YUV422P10 ? 10 : 8;

    /* Bit 0x800 of the extradata flags marks interlaced content. */
    avctx->coded_frame->interlaced_frame = info.flags & 0x800 ? 1 : 0;
    avctx->coded_frame->top_field_first = 1;

    utv->codec = CCodec::CreateInstance(UNFCC(avctx->codec_tag), "libavcodec");
    if (utv->codec == NULL) {
        av_log(avctx, AV_LOG_ERROR, "Could not create decoder instance.\n");
        av_frame_free(&avctx->coded_frame);
        av_freep(&utv->buffer);
        return -1;
    }

    begin_ret = utv->codec->DecodeBegin(format, avctx->width, avctx->height,
                            CBGROSSWIDTH_WINDOWS, &info, sizeof(UtVideoExtra));
    if (begin_ret != 0) {
        av_log(avctx, AV_LOG_ERROR,
               "Could not initialize decoder: %d\n", begin_ret);
        CCodec::DeleteInstance(utv->codec);
        av_frame_free(&avctx->coded_frame);
        av_freep(&utv->buffer);
        return -1;
    }

    return 0;
}
/*
 * Decode one packet into avctx->coded_frame and hand it out via *data.
 *
 * The SDK decodes into utv->buffer; this function then points the frame's
 * data/linesize at that buffer according to the pixel format chosen at init
 * time.  No reference counting is done on the buffer itself — the frame is
 * moved out with av_frame_move_ref() at the end.
 *
 * Returns the number of bytes consumed (the whole packet).
 */
static int utvideo_decode_frame(AVCodecContext *avctx, void *data,
                                int *got_frame, AVPacket *avpkt)
{
UtVideoContext *utv = (UtVideoContext *)avctx->priv_data;
AVFrame *pic = avctx->coded_frame;
int w = avctx->width, h = avctx->height;
/* Every Ut Video frame is an intra frame. */
pic->reference = 0;
pic->pict_type = AV_PICTURE_TYPE_I;
pic->key_frame = 1;
utv->codec->DecodeFrame(utv->buffer, avpkt->data, true);
switch (avctx->pix_fmt) {
case AV_PIX_FMT_YUV420P:
/* SDK output is YV12 (Y plane, then V, then U) — note data[2]
 * (V) precedes data[1] (U) in the buffer. */
pic->linesize[0] = w;
pic->linesize[1] = pic->linesize[2] = w / 2;
pic->data[0] = utv->buffer;
pic->data[2] = utv->buffer + (w * h);
pic->data[1] = pic->data[2] + (w * h / 4);
break;
case AV_PIX_FMT_YUYV422:
pic->linesize[0] = w * 2;
pic->data[0] = utv->buffer;
break;
case AV_PIX_FMT_YUV422P10: {
/* The SDK produced packed v210 (6 pixels per 16 bytes, 128 bytes per
 * 48-pixel group).  Unpack it in place: planar output planes live
 * after the packed region in utv->buffer (see the init-time
 * buf_size enlargement), and each group of three 32-bit words is
 * split into 10-bit Y/U/V samples. */
uint16_t *y, *u, *v;
int i,j;
int linesize = ((w + 47) / 48) * 128;  /* v210 row pitch in bytes */
pic->linesize[0] = w * 2;
pic->linesize[1] =
pic->linesize[2] = w;
pic->data[0] = utv->buffer + linesize * h;
pic->data[1] = pic->data[0] + h*pic->linesize[0];
pic->data[2] = pic->data[1] + h*pic->linesize[1];
y = (uint16_t*)pic->data[0];
u = (uint16_t*)pic->data[1];
v = (uint16_t*)pic->data[2];
for (j = 0; j < h; j++) {
const uint8_t *in = utv->buffer + j * linesize;
/* 6 pixels per iteration; the early breaks handle widths that
 * are not a multiple of 6. */
for (i = 0; i + 1 < w; i += 6, in += 4) {
unsigned a,b;
a = AV_RL32(in);
in += 4;
b = AV_RL32(in);
/* word a: Cb0 | Y0 | Cr0, word b starts with Y1 */
*u++ = (a ) & 0x3FF;
*y++ = (a>>10) & 0x3FF;
*v++ = (a>>20) & 0x3FF;
*y++ = (b ) & 0x3FF;
if (i + 3 >= w)
break;
in += 4;
a = AV_RL32(in);
*u++ = (b>>10) & 0x3FF;
*y++ = (b>>20) & 0x3FF;
*v++ = (a ) & 0x3FF;
*y++ = (a>>10) & 0x3FF;
if (i + 5 >= w)
break;
in += 4;
b = AV_RL32(in);
*u++ = (a>>20) & 0x3FF;
*y++ = (b ) & 0x3FF;
*v++ = (b>>10) & 0x3FF;
*y++ = (b>>20) & 0x3FF;
}
}
break;
}
case AV_PIX_FMT_BGR24:
case AV_PIX_FMT_RGB32:
/* SDK RGB output is bottom-up; expose it top-down by pointing
 * data[0] at the last row and using a negative linesize. */
pic->linesize[0] = -1 * w * (avctx->pix_fmt == AV_PIX_FMT_BGR24 ? 3 : 4);
pic->data[0] = utv->buffer + utv->buf_size + pic->linesize[0];
break;
}
*got_frame = 1;
av_frame_move_ref((AVFrame*)data, pic);
return avpkt->size;
}
/*
 * Tear down the decoder: release the coded frame and the intermediate
 * output buffer, then end the SDK decode session and destroy the
 * CCodec instance.
 */
static av_cold int utvideo_decode_close(AVCodecContext *avctx)
{
    UtVideoContext *ctx = (UtVideoContext *)avctx->priv_data;

    /* Frame and scratch buffer first... */
    av_frame_free(&avctx->coded_frame);
    av_freep(&ctx->buffer);

    /* ...then shut down and dispose of the SDK decoder. */
    ctx->codec->DecodeEnd();
    CCodec::DeleteInstance(ctx->codec);

    return 0;
}
/*
 * AVCodec registration.  This is a C++ translation unit, so C99 designated
 * initializers are unavailable and the AVCodec struct must be initialized
 * positionally — the field order below must match the AVCodec declaration
 * of the lavc version this is built against.  Only the positions holding
 * values defined in this file are labelled; the NULL/0 slots cover the
 * remaining fields in declaration order (TODO confirm against avcodec.h).
 */
AVCodec ff_libutvideo_decoder = {
    "libutvideo",                        /* name */
    NULL_IF_CONFIG_SMALL("Ut Video"),    /* long_name */
    AVMEDIA_TYPE_VIDEO,                  /* type */
    AV_CODEC_ID_UTVIDEO,                 /* id */
    0,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    0,
    NULL,
    NULL,
    sizeof(UtVideoContext),              /* priv_data_size */
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    utvideo_decode_init,                 /* init */
    NULL,
    NULL,
    utvideo_decode_frame,                /* decode */
    utvideo_decode_close,                /* close */
};