This source file includes following definitions.
- GetThreadCount
- VP9FrameBuffer
- GetFreeFrameBuffer
- GetVP9FrameBuffer
- ReleaseVP9FrameBuffer
- CreateFrameCallback
- OnVideoFrameDestroyed
- vpx_codec_alpha_
- Initialize
- InitializeVpxContext
- ConfigureDecoder
- CloseDecoder
- Reset
- Stop
- HasAlpha
- DecodeBuffer
- VpxDecode
- CopyVpxImageTo
#include "media/filters/vpx_video_decoder.h"
#include <algorithm>
#include <string>
#include <vector>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/sys_byteorder.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
#include "media/base/limits.h"
#include "media/base/media_switches.h"
#include "media/base/pipeline.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#define VPX_CODEC_DISABLE_COMPAT 1
extern "C" {
#include "third_party/libvpx/source/libvpx/vpx/vpx_decoder.h"
#include "third_party/libvpx/source/libvpx/vpx/vpx_frame_buffer.h"
#include "third_party/libvpx/source/libvpx/vpx/vp8dx.h"
}
namespace media {
// Default number of decode threads, and the hard upper bound on any
// command-line override.
static const int kDecodeThreads = 2;
static const int kMaxDecodeThreads = 16;

// Returns the number of threads to hand to libvpx for |config|. A parseable
// --video-threads switch wins (clamped to [0, kMaxDecodeThreads]); otherwise
// the default is scaled up for wide VP9 streams.
static int GetThreadCount(const VideoDecoderConfig& config) {
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  std::string threads(cmd_line->GetSwitchValueASCII(switches::kVideoThreads));

  int decode_threads = kDecodeThreads;
  if (!threads.empty() && base::StringToInt(threads, &decode_threads)) {
    // Honor the explicit override, but keep it within sane bounds.
    decode_threads = std::max(decode_threads, 0);
    decode_threads = std::min(decode_threads, kMaxDecodeThreads);
    return decode_threads;
  }

  // No usable override: widen the thread pool for high-resolution VP9.
  if (config.codec() == kCodecVP9) {
    if (config.coded_size().width() >= 2048)
      decode_threads = 8;
    else if (config.coded_size().width() >= 1024)
      decode_threads = 4;
  }
  return decode_threads;
}
// Upper bound on the number of buffers the VP9 memory pool may hold: enough
// for the codec's reference and work buffers plus the frames the pipeline can
// keep outstanding at once.
static const uint32 kVP9MaxFrameBuffers = VP9_MAXIMUM_REF_BUFFERS +
VPX_MAXIMUM_WORK_BUFFERS +
limits::kMaxVideoFrames;
// Pool of plain CPU buffers handed to libvpx through its external frame
// buffer interface for VP9. Ref-counted so that VideoFrames wrapping pooled
// memory can keep the pool (and thus their pixels) alive even after the
// decoder itself has been destroyed.
class VpxVideoDecoder::MemoryPool
: public base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool> {
public:
MemoryPool();
// libvpx callback: supplies a buffer of at least |min_size| bytes via |fb|.
// |user_priv| is the MemoryPool*. Returns 0 on success, -1 when the pool is
// exhausted.
static int32 GetVP9FrameBuffer(void* user_priv, size_t min_size,
vpx_codec_frame_buffer* fb);
// libvpx callback: releases the codec's reference on the buffer in |fb|.
static int32 ReleaseVP9FrameBuffer(void *user_priv,
vpx_codec_frame_buffer *fb);
// Returns a closure (bound to the current loop) that drops a reference on
// the pooled buffer behind |fb_priv_data|; takes that reference immediately.
// Intended as a VideoFrame destruction callback.
base::Closure CreateFrameCallback(void* fb_priv_data);
private:
friend class base::RefCountedThreadSafe<VpxVideoDecoder::MemoryPool>;
~MemoryPool();
// One pooled buffer plus a manual count of users (codec and/or VideoFrame).
struct VP9FrameBuffer {
VP9FrameBuffer() : ref_cnt(0) {}
std::vector<uint8> data;
uint32 ref_cnt;
};
// Returns a buffer with ref_cnt == 0 and capacity >= |min_size|, creating
// one if needed; NULL once kVP9MaxFrameBuffers buffers exist and are busy.
VP9FrameBuffer* GetFreeFrameBuffer(size_t min_size);
// Bound by CreateFrameCallback(); runs when the wrapping frame dies.
void OnVideoFrameDestroyed(VP9FrameBuffer* frame_buffer);
// All buffers ever created; owned here, deleted in the destructor.
std::vector<VP9FrameBuffer*> frame_buffers_;
DISALLOW_COPY_AND_ASSIGN(MemoryPool);
};
VpxVideoDecoder::MemoryPool::MemoryPool() {}
// Frees every pooled buffer. Runs only once the last reference (decoder or
// outstanding frame callback) is gone, so nothing can still be using them.
VpxVideoDecoder::MemoryPool::~MemoryPool() {
STLDeleteElements(&frame_buffers_);
}
// Returns a buffer nobody references, growing the pool (up to
// kVP9MaxFrameBuffers) when all existing buffers are busy, or NULL when the
// pool is full. The returned buffer's storage is at least |min_size| bytes.
VpxVideoDecoder::MemoryPool::VP9FrameBuffer*
VpxVideoDecoder::MemoryPool::GetFreeFrameBuffer(size_t min_size) {
  // Scan for the first idle buffer.
  size_t index = 0;
  while (index < frame_buffers_.size() && frame_buffers_[index]->ref_cnt != 0)
    ++index;

  if (index == frame_buffers_.size()) {
    // Everything is in use; allocate a new buffer unless at capacity.
    if (index == kVP9MaxFrameBuffers)
      return NULL;
    frame_buffers_.push_back(new VP9FrameBuffer());
  }

  // Make sure the chosen buffer is large enough for the request.
  VP9FrameBuffer* buffer = frame_buffers_[index];
  if (buffer->data.size() < min_size)
    buffer->data.resize(min_size);
  return buffer;
}
// Static libvpx callback that supplies an external frame buffer of at least
// |min_size| bytes. |user_priv| is the MemoryPool registered with
// vpx_codec_set_frame_buffer_functions(). Returns 0 on success, -1 when no
// buffer is available.
int32 VpxVideoDecoder::MemoryPool::GetVP9FrameBuffer(
    void* user_priv, size_t min_size, vpx_codec_frame_buffer* fb) {
  DCHECK(user_priv);
  DCHECK(fb);

  VpxVideoDecoder::MemoryPool* memory_pool =
      static_cast<VpxVideoDecoder::MemoryPool*>(user_priv);

  VP9FrameBuffer* buffer = memory_pool->GetFreeFrameBuffer(min_size);
  if (buffer == NULL)
    return -1;

  // Hand the storage to libvpx, and stash the pool entry in fb->priv so
  // ReleaseVP9FrameBuffer()/CreateFrameCallback() can recover it later.
  fb->data = &buffer->data[0];
  fb->size = buffer->data.size();
  fb->priv = static_cast<void*>(buffer);
  ++buffer->ref_cnt;
  return 0;
}
// Static libvpx callback invoked when the codec no longer needs the buffer
// attached to |fb|. |user_priv| is the MemoryPool (unused; the pool entry is
// recovered from fb->priv, set in GetVP9FrameBuffer()). Always returns 0.
int32 VpxVideoDecoder::MemoryPool::ReleaseVP9FrameBuffer(
    void *user_priv, vpx_codec_frame_buffer *fb) {
  // Validate the callback arguments, mirroring GetVP9FrameBuffer().
  DCHECK(user_priv);
  DCHECK(fb);
  VP9FrameBuffer* frame_buffer = static_cast<VP9FrameBuffer*>(fb->priv);
  --frame_buffer->ref_cnt;
  return 0;
}
// Returns a closure, bound to the current loop, that releases the reference
// taken here on the pooled buffer behind |fb_priv_data| (the fb->priv value
// set in GetVP9FrameBuffer()). Taking the reference immediately keeps the
// buffer alive for as long as the wrapping VideoFrame exists.
base::Closure VpxVideoDecoder::MemoryPool::CreateFrameCallback(
void* fb_priv_data) {
VP9FrameBuffer* frame_buffer = static_cast<VP9FrameBuffer*>(fb_priv_data);
++frame_buffer->ref_cnt;
return BindToCurrentLoop(
base::Bind(&MemoryPool::OnVideoFrameDestroyed, this,
frame_buffer));
}
// Runs when a VideoFrame wrapping |frame_buffer| is destroyed (via the
// closure from CreateFrameCallback()); drops that frame's reference so the
// buffer can be reused by GetFreeFrameBuffer().
void VpxVideoDecoder::MemoryPool::OnVideoFrameDestroyed(
VP9FrameBuffer* frame_buffer) {
--frame_buffer->ref_cnt;
}
// Constructs an uninitialized decoder bound to |task_runner|; all other
// methods DCHECK that they run on this task runner.
VpxVideoDecoder::VpxVideoDecoder(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner)
: task_runner_(task_runner),
state_(kUninitialized),
vpx_codec_(NULL),
vpx_codec_alpha_(NULL) {}
// The DCHECK enforces that Stop() has run (state_ == kUninitialized) before
// destruction; CloseDecoder() then frees any remaining libvpx state.
VpxVideoDecoder::~VpxVideoDecoder() {
DCHECK_EQ(kUninitialized, state_);
CloseDecoder();
}
// Initializes the decoder for |config| and reports the outcome through
// |status_cb|: PIPELINE_OK on success, DECODER_ERROR_NOT_SUPPORTED when the
// config cannot be handled. Encrypted configs and overlapping decodes are
// disallowed by DCHECK.
void VpxVideoDecoder::Initialize(const VideoDecoderConfig& config,
const PipelineStatusCB& status_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(config.IsValidConfig());
DCHECK(!config.is_encrypted());
DCHECK(decode_cb_.is_null());
if (!ConfigureDecoder(config)) {
status_cb.Run(DECODER_ERROR_NOT_SUPPORTED);
return;
}
// Success: remember the config and enter the normal decoding state.
config_ = config;
state_ = kNormal;
status_cb.Run(PIPELINE_OK);
}
// Allocates and initializes a fresh vpx_codec_ctx for |config|, selecting the
// VP9 or VP8 decoder interface from the config's codec. NOTE: the incoming
// |context| value is ignored — it is immediately overwritten with a new
// allocation; callers must use the returned pointer. Returns NULL (after
// freeing the allocation) if libvpx initialization fails.
static vpx_codec_ctx* InitializeVpxContext(vpx_codec_ctx* context,
const VideoDecoderConfig& config) {
context = new vpx_codec_ctx();
vpx_codec_dec_cfg_t vpx_config = {0};
vpx_config.w = config.coded_size().width();
vpx_config.h = config.coded_size().height();
vpx_config.threads = GetThreadCount(config);
vpx_codec_err_t status = vpx_codec_dec_init(context,
config.codec() == kCodecVP9 ?
vpx_codec_vp9_dx() :
vpx_codec_vp8_dx(),
&vpx_config,
0);
if (status != VPX_CODEC_OK) {
LOG(ERROR) << "vpx_codec_dec_init failed, status=" << status;
delete context;
return NULL;
}
return context;
}
// Creates the libvpx context(s) needed for |config|, closing any previous
// decoder first. Returns false when the codec/format combination is
// unsupported or libvpx setup fails.
bool VpxVideoDecoder::ConfigureDecoder(const VideoDecoderConfig& config) {
if (config.codec() != kCodecVP8 && config.codec() != kCodecVP9)
return false;
// VP8 is only accepted here when the stream carries alpha (YV12A);
// presumably opaque VP8 is decoded elsewhere — confirm with callers.
if (config.codec() == kCodecVP8 && config.format() != VideoFrame::YV12A)
return false;
CloseDecoder();
vpx_codec_ = InitializeVpxContext(vpx_codec_, config);
if (!vpx_codec_)
return false;
// VP9 decodes into buffers owned by our MemoryPool instead of libvpx's own
// allocations, enabling the zero-copy path in CopyVpxImageTo().
if (config.codec() == kCodecVP9) {
memory_pool_ = new MemoryPool();
if (vpx_codec_set_frame_buffer_functions(vpx_codec_,
&MemoryPool::GetVP9FrameBuffer,
&MemoryPool::ReleaseVP9FrameBuffer,
memory_pool_)) {
LOG(ERROR) << "Failed to configure external buffers.";
return false;
}
}
// Alpha streams get a second, independent decoder context for the A plane.
if (config.format() == VideoFrame::YV12A) {
vpx_codec_alpha_ = InitializeVpxContext(vpx_codec_alpha_, config);
if (!vpx_codec_alpha_)
return false;
}
return true;
}
// Destroys both libvpx contexts (if present) and drops this decoder's
// reference on the memory pool; frames still wrapping pooled buffers keep
// the pool alive through their own references.
void VpxVideoDecoder::CloseDecoder() {
if (vpx_codec_) {
vpx_codec_destroy(vpx_codec_);
delete vpx_codec_;
vpx_codec_ = NULL;
memory_pool_ = NULL;
}
if (vpx_codec_alpha_) {
vpx_codec_destroy(vpx_codec_alpha_);
delete vpx_codec_alpha_;
vpx_codec_alpha_ = NULL;
}
}
// Decodes |buffer| and reports the result through |decode_cb|, always
// asynchronously (the callback is bound to the current loop). Only one
// decode may be outstanding at a time (enforced by CHECK).
void VpxVideoDecoder::Decode(const scoped_refptr<DecoderBuffer>& buffer,
const DecodeCB& decode_cb) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!decode_cb.is_null());
CHECK_NE(state_, kUninitialized);
CHECK(decode_cb_.is_null()) << "Overlapping decodes are not supported.";
decode_cb_ = BindToCurrentLoop(decode_cb);
// Once in the error state, every further decode reports failure.
if (state_ == kError) {
base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
return;
}
// After end of stream, keep emitting end-of-stream frames.
if (state_ == kDecodeFinished) {
base::ResetAndReturn(&decode_cb_).Run(kOk, VideoFrame::CreateEOSFrame());
return;
}
DecodeBuffer(buffer);
}
// Returns the decoder to the normal decoding state. |closure| is posted
// rather than run inline to preserve the asynchronous contract. No decode
// may be pending (enforced by DCHECK).
void VpxVideoDecoder::Reset(const base::Closure& closure) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(decode_cb_.is_null());
state_ = kNormal;
task_runner_->PostTask(FROM_HERE, closure);
}
// Marks the decoder as uninitialized, satisfying the destructor's
// precondition; the libvpx contexts themselves are torn down later in
// CloseDecoder().
void VpxVideoDecoder::Stop() {
DCHECK(task_runner_->BelongsToCurrentThread());
state_ = kUninitialized;
}
// True exactly when a dedicated alpha decoder context was created in
// ConfigureDecoder(), i.e. when the stream format is YV12A.
bool VpxVideoDecoder::HasAlpha() const {
  return NULL != vpx_codec_alpha_;
}
// Decodes one buffer and consumes |decode_cb_| with the result: an EOS frame
// at end of stream, kDecodeError (entering the error state) on failure,
// kNotEnoughData when libvpx produced no output for this input, or kOk plus
// the decoded frame.
void VpxVideoDecoder::DecodeBuffer(const scoped_refptr<DecoderBuffer>& buffer) {
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_NE(state_, kUninitialized);
DCHECK_NE(state_, kDecodeFinished);
DCHECK_NE(state_, kError);
DCHECK(!decode_cb_.is_null());
DCHECK(buffer);
// Transition to kDecodeFinished on the first end-of-stream buffer.
if (state_ == kNormal && buffer->end_of_stream()) {
state_ = kDecodeFinished;
base::ResetAndReturn(&decode_cb_).Run(kOk, VideoFrame::CreateEOSFrame());
return;
}
scoped_refptr<VideoFrame> video_frame;
if (!VpxDecode(buffer, &video_frame)) {
state_ = kError;
base::ResetAndReturn(&decode_cb_).Run(kDecodeError, NULL);
return;
}
// A null frame means the decoder needs more input before producing output.
if (!video_frame.get()) {
base::ResetAndReturn(&decode_cb_).Run(kNotEnoughData, NULL);
return;
}
base::ResetAndReturn(&decode_cb_).Run(kOk, video_frame);
}
bool VpxVideoDecoder::VpxDecode(const scoped_refptr<DecoderBuffer>& buffer,
scoped_refptr<VideoFrame>* video_frame) {
DCHECK(video_frame);
DCHECK(!buffer->end_of_stream());
int64 timestamp = buffer->timestamp().InMicroseconds();
void* user_priv = reinterpret_cast<void*>(×tamp);
vpx_codec_err_t status = vpx_codec_decode(vpx_codec_,
buffer->data(),
buffer->data_size(),
user_priv,
0);
if (status != VPX_CODEC_OK) {
LOG(ERROR) << "vpx_codec_decode() failed, status=" << status;
return false;
}
vpx_codec_iter_t iter = NULL;
const vpx_image_t* vpx_image = vpx_codec_get_frame(vpx_codec_, &iter);
if (!vpx_image) {
*video_frame = NULL;
return true;
}
if (vpx_image->user_priv != reinterpret_cast<void*>(×tamp)) {
LOG(ERROR) << "Invalid output timestamp.";
return false;
}
const vpx_image_t* vpx_image_alpha = NULL;
if (vpx_codec_alpha_ && buffer->side_data_size() >= 8) {
int64 timestamp_alpha = buffer->timestamp().InMicroseconds();
void* user_priv_alpha = reinterpret_cast<void*>(×tamp_alpha);
const uint64 side_data_id = base::NetToHost64(
*(reinterpret_cast<const uint64*>(buffer->side_data())));
if (side_data_id == 1) {
status = vpx_codec_decode(vpx_codec_alpha_,
buffer->side_data() + 8,
buffer->side_data_size() - 8,
user_priv_alpha,
0);
if (status != VPX_CODEC_OK) {
LOG(ERROR) << "vpx_codec_decode() failed on alpha, status=" << status;
return false;
}
vpx_codec_iter_t iter_alpha = NULL;
vpx_image_alpha = vpx_codec_get_frame(vpx_codec_alpha_, &iter_alpha);
if (!vpx_image_alpha) {
*video_frame = NULL;
return true;
}
if (vpx_image_alpha->user_priv !=
reinterpret_cast<void*>(×tamp_alpha)) {
LOG(ERROR) << "Invalid output timestamp on alpha.";
return false;
}
}
}
CopyVpxImageTo(vpx_image, vpx_image_alpha, video_frame);
(*video_frame)->SetTimestamp(base::TimeDelta::FromMicroseconds(timestamp));
return true;
}
// Converts |vpx_image| (plus the optional |vpx_image_alpha| plane) into
// *|video_frame|. For opaque VP9 backed by the memory pool, the libvpx buffer
// is wrapped without copying and kept alive by the pool's frame callback;
// otherwise the planes are copied into a freshly pooled frame.
void VpxVideoDecoder::CopyVpxImageTo(const vpx_image* vpx_image,
                                     const struct vpx_image* vpx_image_alpha,
                                     scoped_refptr<VideoFrame>* video_frame) {
  CHECK(vpx_image);
  CHECK(vpx_image->fmt == VPX_IMG_FMT_I420 ||
        vpx_image->fmt == VPX_IMG_FMT_YV12);

  gfx::Size size(vpx_image->d_w, vpx_image->d_h);

  // Zero-copy path: only valid without alpha, since the alpha plane would
  // live in a separate image.
  if (!vpx_codec_alpha_ && memory_pool_) {
    *video_frame = VideoFrame::WrapExternalYuvData(
        VideoFrame::YV12,
        size, gfx::Rect(size), config_.natural_size(),
        vpx_image->stride[VPX_PLANE_Y],
        vpx_image->stride[VPX_PLANE_U],
        vpx_image->stride[VPX_PLANE_V],
        vpx_image->planes[VPX_PLANE_Y],
        vpx_image->planes[VPX_PLANE_U],
        vpx_image->planes[VPX_PLANE_V],
        kNoTimestamp(),
        memory_pool_->CreateFrameCallback(vpx_image->fb_priv));
    return;
  }

  *video_frame = frame_pool_.CreateFrame(
      vpx_codec_alpha_ ? VideoFrame::YV12A : VideoFrame::YV12,
      size,
      gfx::Rect(size),
      config_.natural_size(),
      kNoTimestamp());

  CopyYPlane(vpx_image->planes[VPX_PLANE_Y],
             vpx_image->stride[VPX_PLANE_Y],
             vpx_image->d_h,
             video_frame->get());
  CopyUPlane(vpx_image->planes[VPX_PLANE_U],
             vpx_image->stride[VPX_PLANE_U],
             (vpx_image->d_h + 1) / 2,
             video_frame->get());
  CopyVPlane(vpx_image->planes[VPX_PLANE_V],
             vpx_image->stride[VPX_PLANE_V],
             (vpx_image->d_h + 1) / 2,
             video_frame->get());
  if (!vpx_codec_alpha_)
    return;
  if (!vpx_image_alpha) {
    // Alpha decoding produced no image for this frame; render it opaque.
    MakeOpaqueAPlane(
        vpx_image->stride[VPX_PLANE_Y], vpx_image->d_h, video_frame->get());
    return;
  }
  // Fix: read the alpha plane with the *alpha* image's stride — the source
  // pointer comes from |vpx_image_alpha|, and the color image's stride is
  // not guaranteed to match it.
  CopyAPlane(vpx_image_alpha->planes[VPX_PLANE_Y],
             vpx_image_alpha->stride[VPX_PLANE_Y],
             vpx_image->d_h,
             video_frame->get());
}
}