This source file includes following definitions.
- ReadGoldenThumbnailMD5s
- num_decoded_frames
- stream_version_
- ProvidePictureBuffers
- DismissPictureBuffer
- PictureReady
- CallClientPictureReady
- NotifyEndOfBitstreamBuffer
- NotifyFlushDone
- NotifyResetDone
- NotifyError
- num_done_bitstream_buffers
- num_skipped_fragments
- num_queued_fragments
- decoder_deleted
- decode_calls_per_second_
- DoNothingReturnTrue
- CreateAndStartDecoder
- ProvidePictureBuffers
- DismissPictureBuffer
- PictureReady
- NotifyEndOfBitstreamBuffer
- NotifyFlushDone
- NotifyResetDone
- NotifyError
- OutputFrameDeliveryTimes
- NotifyFrameDropped
- LookingAtNAL
- SetState
- FinishInitialization
- DeleteDecoder
- GetBytesForFirstFragment
- GetBytesForNextFragment
- GetBytesForNextNALU
- GetBytesForNextFrame
- FragmentHasConfigInfo
- DecodeNextFragment
- num_decoded_frames
- frames_per_second
- decode_time_median
- SetUp
- TearDown
- ParseAndReadTestVideoData
- UpdateTestVideoFileParams
- InitializeRenderingHelper
- CreateAndStartDecoder
- WaitUntilDecodeFinish
- WaitUntilIdle
- OutputLogFile
- AssertWaitForStateOrDeleted
- TEST_P
- TEST_F
- main
#include <fcntl.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>

#include <algorithm>
#include <deque>
#include <map>
#include "testing/gtest/include/gtest/gtest.h"
#include "base/at_exit.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/file_util.h"
#include "base/files/file.h"
#include "base/format_macros.h"
#include "base/md5.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/process/process.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_split.h"
#include "base/strings/stringize_macros.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
#include "content/common/gpu/media/rendering_helper.h"
#include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
#include "content/public/common/content_switches.h"
#include "media/filters/h264_parser.h"
#include "ui/gfx/codec/png_codec.h"
#if defined(OS_WIN)
#include "content/common/gpu/media/dxva_video_decode_accelerator.h"
#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
#include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
#include "content/common/gpu/media/v4l2_video_device.h"
#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
#include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
#include "content/common/gpu/media/vaapi_wrapper.h"
#if defined(USE_X11)
#include "ui/gl/gl_implementation.h"
#endif
#else
#error The VideoAccelerator tests are not supported on this platform.
#endif
using media::VideoDecodeAccelerator;
namespace content {
namespace {
// Test configuration, overridable from the command line in main().
// Format: "name:width:height:numframes:numfragments:minFPSwithrender:
// minFPSnorender:profile" — entries separated by ';', see
// ParseAndReadTestVideoData().
const base::FilePath::CharType* g_test_video_data =
    FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");
// If set, per-frame delivery times are written to this file.
const base::FilePath::CharType* g_output_log = NULL;
// Simulated rendering rate; 0 disables throttling (ThrottlingVDAClient).
double g_rendering_fps = 0;
// Skip actual GL rendering of decoded frames (decode-only benchmarking).
bool g_disable_rendering = false;

// Magic values for TestVideoFile::reset_after_frame_num; non-negative values
// mean "reset after that many decoded frames".
enum ResetPoint {
  RESET_AFTER_FIRST_CONFIG_INFO = -4,
  START_OF_STREAM_RESET = -3,
  MID_STREAM_RESET = -2,
  END_OF_STREAM_RESET = -1
};

// Upper bound on the frame number chosen for a mid-stream reset.
const int kMaxResetAfterFrameNum = 100;
// Frames past this index get their picture buffers reused after a delay.
const int kMaxFramesToDelayReuse = 64;
const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
// Decode() pacing used by the WebRTC-style test.
const int kWebRtcDecodeCallsPerSecond = 30;
// One input video plus the expectations the tests hold against it.  Fields
// mirror the colon-delimited spec parsed by ParseAndReadTestVideoData();
// -1 / PROFILE_UNKNOWN mean "not specified".
struct TestVideoFile {
  explicit TestVideoFile(base::FilePath::StringType file_name)
      : file_name(file_name),
        width(-1),
        height(-1),
        num_frames(-1),
        num_fragments(-1),
        min_fps_render(-1),
        min_fps_no_render(-1),
        profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
        reset_after_frame_num(END_OF_STREAM_RESET) {
  }

  base::FilePath::StringType file_name;
  int width;
  int height;
  int num_frames;
  int num_fragments;
  int min_fps_render;     // Minimum fps expected while rendering.
  int min_fps_no_render;  // Minimum fps expected with rendering suppressed.
  media::VideoCodecProfile profile;
  int reset_after_frame_num;  // A ResetPoint value or a concrete frame count.
  std::string data_str;       // Entire raw file contents.
};
// Thumbnail-test geometry: the on-screen window, the page the thumbnails are
// composited onto, and the size of each individual thumbnail.
const gfx::Size kThumbnailsDisplaySize(1366 - 1, 768);
const gfx::Size kThumbnailsPageSize(1600, 1200);
const gfx::Size kThumbnailSize(160, 120);
// Length of an ASCII-hex MD5 digest string.
const int kMD5StringLength = 32;
// Returns true iff |c| is an ASCII hex digit.  The cast to unsigned char is
// required: passing a plain (possibly negative) char to isxdigit() is
// undefined behavior per the C standard.
static bool IsHexDigitChar(char c) {
  return isxdigit(static_cast<unsigned char>(c)) != 0;
}

// Reads the golden thumbnail MD5 digests for |video_file| from its side-car
// "<file_name>.md5" file into |md5_strings|.  Blank lines and lines starting
// with '#' are skipped as comments; every other line must be exactly one
// 32-character hex digest.  CHECK-fails on malformed lines or an empty file.
// NOTE(review): a file with CRLF line endings would leave a trailing '\r'
// and fail the length CHECK — presumably inputs are LF-only; confirm.
void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
                             std::vector<std::string>* md5_strings) {
  base::FilePath filepath(video_file->file_name);
  filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
  std::string all_md5s;
  base::ReadFileToString(filepath, &all_md5s);
  base::SplitString(all_md5s, '\n', md5_strings);
  for (std::vector<std::string>::iterator md5_string = md5_strings->begin();
       md5_string != md5_strings->end(); ++md5_string) {
    // Ignore the empty string added by SplitString and comment lines.
    if (!md5_string->length())
      continue;
    if (md5_string->at(0) == '#')
      continue;
    CHECK_EQ(static_cast<int>(md5_string->length()),
             kMD5StringLength) << *md5_string;
    bool hex_only = std::count_if(md5_string->begin(),
                                  md5_string->end(), IsHexDigitChar) ==
                    kMD5StringLength;
    CHECK(hex_only) << *md5_string;
  }
  CHECK_GE(md5_strings->size(), 1U) << all_md5s;
}
// States a decoding client advances through, in increasing order.  Tests
// wait on these via ClientStateNotification; DeleteDecoder() notifies all
// remaining states so waiting tests always unblock.
enum ClientState {
  CS_CREATED = 0,
  CS_DECODER_SET = 1,
  CS_INITIALIZED = 2,
  CS_FLUSHING = 3,
  CS_FLUSHED = 4,
  CS_RESETTING = 5,
  CS_RESET = 6,
  CS_ERROR = 7,
  CS_DESTROYED = 8,
  CS_MAX,  // Must be last.
};
// A VDA client that sits between the decoder and the real test client,
// delaying PictureReady() delivery so pictures arrive at a fixed |fps| to
// simulate a renderer.  Pictures that would be delivered more than one frame
// late are dropped and recycled via |reuse_picture_cb| instead.
class ThrottlingVDAClient : public VideoDecodeAccelerator::Client,
                            public base::SupportsWeakPtr<ThrottlingVDAClient> {
 public:
  typedef base::Callback<void(int32 picture_buffer_id)> ReusePictureCB;

  ThrottlingVDAClient(VideoDecodeAccelerator::Client* client,
                      double fps,
                      ReusePictureCB reuse_picture_cb);
  virtual ~ThrottlingVDAClient();

  // VideoDecodeAccelerator::Client implementation; everything except
  // PictureReady()/NotifyFlushDone()/NotifyResetDone() simply forwards.
  virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                     const gfx::Size& dimensions,
                                     uint32 texture_target) OVERRIDE;
  virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
  virtual void PictureReady(const media::Picture& picture) OVERRIDE;
  virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
  virtual void NotifyFlushDone() OVERRIDE;
  virtual void NotifyResetDone() OVERRIDE;
  virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;

  // Frames decoded so far; counted on arrival, not on throttled delivery.
  int num_decoded_frames() { return num_decoded_frames_; }

 private:
  // Delivers (or drops) the oldest pending picture.  |version| guards
  // against delivering pictures that were queued before a Reset().
  void CallClientPictureReady(int version);

  VideoDecodeAccelerator::Client* client_;
  ReusePictureCB reuse_picture_cb_;
  base::TimeTicks next_frame_delivered_time_;
  base::TimeDelta frame_duration_;  // 1 / fps.
  int num_decoded_frames_;
  int stream_version_;  // Incremented on every NotifyResetDone().
  std::deque<media::Picture> pending_pictures_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ThrottlingVDAClient);
};
ThrottlingVDAClient::ThrottlingVDAClient(VideoDecodeAccelerator::Client* client,
                                         double fps,
                                         ReusePictureCB reuse_picture_cb)
    : client_(client),
      reuse_picture_cb_(reuse_picture_cb),
      num_decoded_frames_(0),
      stream_version_(0) {
  CHECK(client_);
  CHECK_GT(fps, 0);
  // Target inter-frame delivery interval derived from the requested fps.
  frame_duration_ = base::TimeDelta::FromSeconds(1) / fps;
}

ThrottlingVDAClient::~ThrottlingVDAClient() {}

// Buffer management is not throttled; forward straight through.
void ThrottlingVDAClient::ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                                const gfx::Size& dimensions,
                                                uint32 texture_target) {
  client_->ProvidePictureBuffers(
      requested_num_of_buffers, dimensions, texture_target);
}

void ThrottlingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
  client_->DismissPictureBuffer(picture_buffer_id);
}
void ThrottlingVDAClient::PictureReady(const media::Picture& picture) {
  ++num_decoded_frames_;
  // An empty queue means no delivery task is in flight; schedule one for the
  // next delivery slot (immediately for the very first picture).
  if (pending_pictures_.empty()) {
    base::TimeDelta delay =
        next_frame_delivered_time_.is_null()
            ? base::TimeDelta()
            : next_frame_delivered_time_ - base::TimeTicks::Now();
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&ThrottlingVDAClient::CallClientPictureReady,
                   AsWeakPtr(),
                   stream_version_),
        delay);
  }
  pending_pictures_.push_back(picture);
}

void ThrottlingVDAClient::CallClientPictureReady(int version) {
  // A stale |version| means the decoder was reset after this task was
  // posted; NotifyResetDone() already recycled the pending pictures.
  if (version != stream_version_)
    return;
  base::TimeTicks now = base::TimeTicks::Now();
  if (next_frame_delivered_time_.is_null())
    next_frame_delivered_time_ = now;
  if (next_frame_delivered_time_ + frame_duration_ < now) {
    // More than one frame late: drop this picture and recycle its buffer
    // instead of delivering it.
    reuse_picture_cb_.Run(pending_pictures_.front().picture_buffer_id());
  } else {
    client_->PictureReady(pending_pictures_.front());
  }
  pending_pictures_.pop_front();
  next_frame_delivered_time_ += frame_duration_;
  // Keep draining the queue, one picture per delivery slot.
  if (!pending_pictures_.empty()) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&ThrottlingVDAClient::CallClientPictureReady,
                   AsWeakPtr(),
                   stream_version_),
        next_frame_delivered_time_ - base::TimeTicks::Now());
  }
}
void ThrottlingVDAClient::NotifyEndOfBitstreamBuffer(
    int32 bitstream_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(bitstream_buffer_id);
}

void ThrottlingVDAClient::NotifyFlushDone() {
  // Hold the flush-done signal back until every pending picture has been
  // delivered or dropped; re-check after the next delivery slot.
  // NOTE(review): base::Unretained is safe only if |this| outlives the
  // posted task — presumably guaranteed by the test harness; confirm.
  if (!pending_pictures_.empty()) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&ThrottlingVDAClient::NotifyFlushDone,
                   base::Unretained(this)),
        next_frame_delivered_time_ - base::TimeTicks::Now());
    return;
  }
  client_->NotifyFlushDone();
}

void ThrottlingVDAClient::NotifyResetDone() {
  // Invalidate any in-flight delivery task and recycle all queued pictures.
  ++stream_version_;
  while (!pending_pictures_.empty()) {
    reuse_picture_cb_.Run(pending_pictures_.front().picture_buffer_id());
    pending_pictures_.pop_front();
  }
  next_frame_delivered_time_ = base::TimeTicks();
  client_->NotifyResetDone();
}

void ThrottlingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}
// Client that drives a hardware decoder end-to-end over |encoded_data| and
// renders decoded pictures through |rendering_helper_|, reporting progress
// to the owning test via |note_| state transitions.
class GLRenderingVDAClient
    : public VideoDecodeAccelerator::Client,
      public base::SupportsWeakPtr<GLRenderingVDAClient> {
 public:
  // |rendering_fps| > 0 interposes a ThrottlingVDAClient between the decoder
  // and this client.  |delete_decoder_state| is the ClientState at which to
  // delete the decoder, or a negative value -N meaning "after N Decode()
  // calls".  |decode_calls_per_second| > 0 paces Decode() at a fixed rate
  // (WebRTC-style); it requires num_in_flight_decodes == 1.
  GLRenderingVDAClient(RenderingHelper* rendering_helper,
                       int rendering_window_id,
                       ClientStateNotification<ClientState>* note,
                       const std::string& encoded_data,
                       int num_in_flight_decodes,
                       int num_play_throughs,
                       int reset_after_frame_num,
                       int delete_decoder_state,
                       int frame_width,
                       int frame_height,
                       media::VideoCodecProfile profile,
                       double rendering_fps,
                       bool suppress_rendering,
                       int delay_reuse_after_frame_num,
                       int decode_calls_per_second);
  virtual ~GLRenderingVDAClient();

  // Constructs the platform decoder, initializes it, and starts decoding.
  void CreateAndStartDecoder();

  // VideoDecodeAccelerator::Client implementation.
  virtual void ProvidePictureBuffers(uint32 requested_num_of_buffers,
                                     const gfx::Size& dimensions,
                                     uint32 texture_target) OVERRIDE;
  virtual void DismissPictureBuffer(int32 picture_buffer_id) OVERRIDE;
  virtual void PictureReady(const media::Picture& picture) OVERRIDE;
  virtual void NotifyEndOfBitstreamBuffer(int32 bitstream_buffer_id) OVERRIDE;
  virtual void NotifyFlushDone() OVERRIDE;
  virtual void NotifyResetDone() OVERRIDE;
  virtual void NotifyError(VideoDecodeAccelerator::Error error) OVERRIDE;

  // Writes per-frame delivery timestamps (deltas, in us) to |output|.
  void OutputFrameDeliveryTimes(base::File* output);
  // Called by the throttling client when it drops a late frame.
  void NotifyFrameDropped(int32 picture_buffer_id);

  // Simple accessors / statistics.
  int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
  int num_skipped_fragments() { return num_skipped_fragments_; }
  int num_queued_fragments() { return num_queued_fragments_; }
  int num_decoded_frames();
  double frames_per_second();
  int decode_time_median();
  bool decoder_deleted() { return !decoder_.get(); }

 private:
  typedef std::map<int, media::PictureBuffer*> PictureBufferById;

  void SetState(ClientState new_state);
  void FinishInitialization();
  void DeleteDecoder();

  // Stream-splitting helpers; see the definitions for details.
  std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
  std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
  void GetBytesForNextNALU(size_t start_pos, size_t* end_pos);
  std::string GetBytesForNextFrame(
      size_t start_pos, size_t* end_pos);

  // Feeds the next fragment of |encoded_data_| to the decoder.
  void DecodeNextFragment();

  RenderingHelper* rendering_helper_;
  int rendering_window_id_;
  std::string encoded_data_;
  const int num_in_flight_decodes_;
  int outstanding_decodes_;
  size_t encoded_data_next_pos_to_decode_;
  int next_bitstream_buffer_id_;
  ClientStateNotification<ClientState>* note_;
  scoped_ptr<VideoDecodeAccelerator> decoder_;
  // Weak pointers to |decoder_| used for delayed ReusePictureBuffer tasks.
  scoped_ptr<base::WeakPtrFactory<VideoDecodeAccelerator> >
      weak_decoder_factory_;
  std::set<int> outstanding_texture_ids_;
  int remaining_play_throughs_;
  int reset_after_frame_num_;  // ResetPoint value or a concrete frame count.
  int delete_decoder_state_;   // ClientState, or -N: see constructor comment.
  ClientState state_;
  int num_skipped_fragments_;
  int num_queued_fragments_;
  int num_decoded_frames_;
  int num_done_bitstream_buffers_;
  PictureBufferById picture_buffers_by_id_;
  base::TimeTicks initialize_done_ticks_;
  media::VideoCodecProfile profile_;
  GLenum texture_target_;
  bool suppress_rendering_;
  std::vector<base::TimeTicks> frame_delivery_times_;
  int delay_reuse_after_frame_num_;
  // Non-NULL iff throttled rendering was requested (rendering_fps > 0).
  scoped_ptr<ThrottlingVDAClient> throttling_client_;
  // Decode() submission time keyed by bitstream buffer id, for latency stats.
  std::map<int, base::TimeTicks> decode_start_time_;
  std::vector<base::TimeDelta> decode_time_;
  int decode_calls_per_second_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
};
GLRenderingVDAClient::GLRenderingVDAClient(
    RenderingHelper* rendering_helper,
    int rendering_window_id,
    ClientStateNotification<ClientState>* note,
    const std::string& encoded_data,
    int num_in_flight_decodes,
    int num_play_throughs,
    int reset_after_frame_num,
    int delete_decoder_state,
    int frame_width,
    int frame_height,
    media::VideoCodecProfile profile,
    double rendering_fps,
    bool suppress_rendering,
    int delay_reuse_after_frame_num,
    int decode_calls_per_second)
    : rendering_helper_(rendering_helper),
      rendering_window_id_(rendering_window_id),
      encoded_data_(encoded_data),
      num_in_flight_decodes_(num_in_flight_decodes),
      outstanding_decodes_(0),
      encoded_data_next_pos_to_decode_(0),
      next_bitstream_buffer_id_(0),
      note_(note),
      remaining_play_throughs_(num_play_throughs),
      reset_after_frame_num_(reset_after_frame_num),
      delete_decoder_state_(delete_decoder_state),
      state_(CS_CREATED),
      num_skipped_fragments_(0),
      num_queued_fragments_(0),
      num_decoded_frames_(0),
      num_done_bitstream_buffers_(0),
      texture_target_(0),
      suppress_rendering_(suppress_rendering),
      delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
      decode_calls_per_second_(decode_calls_per_second) {
  CHECK_GT(num_in_flight_decodes, 0);
  CHECK_GT(num_play_throughs, 0);
  CHECK_GE(rendering_fps, 0);
  // Paced decoding only makes sense with one decode in flight at a time.
  if (decode_calls_per_second_ > 0)
    CHECK_EQ(1, num_in_flight_decodes_);
  // Default to H.264 baseline when the test spec left the profile blank.
  profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
                  ? profile
                  : media::H264PROFILE_BASELINE);
  // rendering_fps > 0: interpose a throttling client that simulates a
  // renderer consuming frames at that rate.
  if (rendering_fps > 0)
    throttling_client_.reset(new ThrottlingVDAClient(
        this,
        rendering_fps,
        base::Bind(&GLRenderingVDAClient::NotifyFrameDropped,
                   base::Unretained(this))));
}

GLRenderingVDAClient::~GLRenderingVDAClient() {
  DeleteDecoder();  // Harmless if already deleted.
  CHECK(decoder_deleted());
  STLDeleteValues(&picture_buffers_by_id_);
  SetState(CS_DESTROYED);
}
// Trivial make-context-current callback for decoders that require one; the
// tests have no per-call GL work to do, so it just reports success.
static bool DoNothingReturnTrue() {
  return true;
}
// Constructs the platform-specific decoder, initializes it, and kicks off
// the first round of Decode() calls.  Must run on the rendering thread.
void GLRenderingVDAClient::CreateAndStartDecoder() {
  CHECK(decoder_deleted());
  CHECK(!decoder_.get());
  // When throttling is enabled the decoder talks to the throttling client,
  // which in turn forwards to |this|.
  VideoDecodeAccelerator::Client* client = this;
  base::WeakPtr<VideoDecodeAccelerator::Client> weak_client = AsWeakPtr();
  if (throttling_client_) {
    client = throttling_client_.get();
    weak_client = throttling_client_->AsWeakPtr();
  }
#if defined(OS_WIN)
  decoder_.reset(
      new DXVAVideoDecodeAccelerator(base::Bind(&DoNothingReturnTrue)));
#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_ARMEL)
  scoped_ptr<V4L2Device> device = V4L2Device::Create(
      static_cast<EGLContext>(rendering_helper_->GetGLContext()));
  if (!device.get()) {
    NotifyError(media::VideoDecodeAccelerator::PLATFORM_FAILURE);
    return;
  }
  decoder_.reset(new V4L2VideoDecodeAccelerator(
      static_cast<EGLDisplay>(rendering_helper_->GetGLDisplay()),
      weak_client,
      base::Bind(&DoNothingReturnTrue),
      device.Pass(),
      base::MessageLoopProxy::current()));
#elif defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
  CHECK_EQ(gfx::kGLImplementationDesktopGL, gfx::GetGLImplementation())
      << "Hardware video decode does not work with OSMesa";
  decoder_.reset(new VaapiVideoDecodeAccelerator(
      static_cast<Display*>(rendering_helper_->GetGLDisplay()),
      base::Bind(&DoNothingReturnTrue)));
#endif
  CHECK(decoder_.get());
  weak_decoder_factory_.reset(
      new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
  SetState(CS_DECODER_SET);
  // SetState() may delete the decoder if the test asked for deletion at
  // CS_DECODER_SET.
  if (decoder_deleted())
    return;
  CHECK(decoder_->Initialize(profile_, client));
  FinishInitialization();
}
// Allocates |requested_num_of_buffers| GL textures (synchronously, on the
// rendering thread) and hands them to the decoder as picture buffers.
void GLRenderingVDAClient::ProvidePictureBuffers(
    uint32 requested_num_of_buffers,
    const gfx::Size& dimensions,
    uint32 texture_target) {
  if (decoder_deleted())
    return;
  std::vector<media::PictureBuffer> buffers;
  texture_target_ = texture_target;
  for (uint32 i = 0; i < requested_num_of_buffers; ++i) {
    // Ids grow monotonically across multiple ProvidePictureBuffers() calls.
    uint32 id = picture_buffers_by_id_.size();
    uint32 texture_id;
    base::WaitableEvent done(false, false);
    rendering_helper_->CreateTexture(
        rendering_window_id_, texture_target_, &texture_id, &done);
    done.Wait();
    CHECK(outstanding_texture_ids_.insert(texture_id).second);
    media::PictureBuffer* buffer =
        new media::PictureBuffer(id, dimensions, texture_id);
    CHECK(picture_buffers_by_id_.insert(std::make_pair(id, buffer)).second);
    buffers.push_back(*buffer);
  }
  decoder_->AssignPictureBuffers(buffers);
}
// Releases the GL texture backing |picture_buffer_id| and forgets the buffer.
void GLRenderingVDAClient::DismissPictureBuffer(int32 picture_buffer_id) {
  PictureBufferById::iterator it =
      picture_buffers_by_id_.find(picture_buffer_id);
  CHECK(it != picture_buffers_by_id_.end());
  CHECK_EQ(outstanding_texture_ids_.erase(it->second->texture_id()), 1U);
  rendering_helper_->DeleteTexture(it->second->texture_id());
  delete it->second;
  picture_buffers_by_id_.erase(it);
}

void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
  // We shouldn't be getting pictures delivered after Reset has completed.
  CHECK_LT(state_, CS_RESET);
  if (decoder_deleted())
    return;
  base::TimeTicks now = base::TimeTicks::Now();
  frame_delivery_times_.push_back(now);
  // Record per-buffer decode latency.
  std::map<int, base::TimeTicks>::iterator it =
      decode_start_time_.find(picture.bitstream_buffer_id());
  ASSERT_NE(decode_start_time_.end(), it);
  decode_time_.push_back(now - it->second);
  decode_start_time_.erase(it);
  CHECK_LE(picture.bitstream_buffer_id(), next_bitstream_buffer_id_);
  ++num_decoded_frames_;
  // Trigger the mid-stream reset once the target frame count is reached on
  // the final play-through, and restart decoding from the stream start.
  if (remaining_play_throughs_ == 1 &&
      reset_after_frame_num_ == num_decoded_frames()) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    encoded_data_next_pos_to_decode_ = 0;
  }
  media::PictureBuffer* picture_buffer =
      picture_buffers_by_id_[picture.picture_buffer_id()];
  CHECK(picture_buffer);
  if (!suppress_rendering_) {
    rendering_helper_->RenderTexture(texture_target_,
                                     picture_buffer->texture_id());
  }
  // Late frames: optionally hold buffers back to exercise the decoder's
  // behavior when the client is slow to return them.
  if (num_decoded_frames() > delay_reuse_after_frame_num_) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
                   weak_decoder_factory_->GetWeakPtr(),
                   picture.picture_buffer_id()),
        kReuseDelay);
  } else {
    decoder_->ReusePictureBuffer(picture.picture_buffer_id());
  }
}

void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
    int32 bitstream_buffer_id) {
  ++num_done_bitstream_buffers_;
  --outstanding_decodes_;
  // In paced mode (decode_calls_per_second_ > 0) DecodeNextFragment() is
  // driven by a timer instead.
  if (decode_calls_per_second_ == 0)
    DecodeNextFragment();
}
void GLRenderingVDAClient::NotifyFlushDone() {
  if (decoder_deleted())
    return;
  SetState(CS_FLUSHED);
  --remaining_play_throughs_;
  DCHECK_GE(remaining_play_throughs_, 0);
  // SetState() may have deleted the decoder (delete-at-CS_FLUSHED tests).
  if (decoder_deleted())
    return;
  // Every flush is followed by a reset; another play-through (or teardown)
  // continues from NotifyResetDone().
  decoder_->Reset();
  SetState(CS_RESETTING);
}

void GLRenderingVDAClient::NotifyResetDone() {
  if (decoder_deleted())
    return;
  // Resume decoding after a mid-stream or start-of-stream reset.
  if (reset_after_frame_num_ == MID_STREAM_RESET) {
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    DecodeNextFragment();
    return;
  } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    for (int i = 0; i < num_in_flight_decodes_; ++i)
      DecodeNextFragment();
    return;
  }
  // End-of-stream reset: either replay the stream or finish up.
  if (remaining_play_throughs_) {
    encoded_data_next_pos_to_decode_ = 0;
    FinishInitialization();
    return;
  }
  SetState(CS_RESET);
  if (!decoder_deleted())
    DeleteDecoder();
}

void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
  SetState(CS_ERROR);
}
// Writes the frame count followed by one line per frame containing the
// microseconds elapsed since the previous frame (first frame: since
// initialization finished).
void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
  std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
                                     frame_delivery_times_.size());
  output->WriteAtCurrentPos(s.data(), s.length());
  base::TimeTicks t0 = initialize_done_ticks_;
  for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
    s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
                           i,
                           (frame_delivery_times_[i] - t0).InMicroseconds());
    t0 = frame_delivery_times_[i];
    output->WriteAtCurrentPos(s.data(), s.length());
  }
}

// Invoked by the throttling client when it drops a late frame; the buffer
// goes straight back to the decoder.
void GLRenderingVDAClient::NotifyFrameDropped(int32 picture_buffer_id) {
  decoder_->ReusePictureBuffer(picture_buffer_id);
}
// Returns true iff |encoded| contains an Annex-B H.264 start code
// (00 00 00 01) at |pos|.  Returns false, rather than reading out of
// bounds, when fewer than four bytes remain at |pos| — the original
// indexed encoded[pos+3] without any size check.
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
  if (pos + 4 > encoded.size())
    return false;
  return encoded[pos] == 0 && encoded[pos + 1] == 0 &&
      encoded[pos + 2] == 0 && encoded[pos + 3] == 1;
}
// Advances to |new_state|, notifies the waiting test, and deletes the
// decoder if this is the state the test asked to delete at.  Note
// |delete_decoder_state_| is an int holding a ClientState here (negative
// values are handled separately in DecodeNextFragment()).
void GLRenderingVDAClient::SetState(ClientState new_state) {
  note_->Notify(new_state);
  state_ = new_state;
  if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
    CHECK(!decoder_deleted());
    DeleteDecoder();
  }
}

void GLRenderingVDAClient::FinishInitialization() {
  SetState(CS_INITIALIZED);
  initialize_done_ticks_ = base::TimeTicks::Now();
  // Start-of-stream reset: reset before feeding any data; decoding starts
  // from NotifyResetDone().
  if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    return;
  }
  // Prime the pipeline with the requested number of concurrent decodes.
  for (int i = 0; i < num_in_flight_decodes_; ++i)
    DecodeNextFragment();
  DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
}

void GLRenderingVDAClient::DeleteDecoder() {
  if (decoder_deleted())
    return;
  weak_decoder_factory_.reset();
  // Destroy() deletes the decoder; release() avoids a double-delete.
  decoder_.release()->Destroy();
  STLClearObject(&encoded_data_);
  for (std::set<int>::iterator it = outstanding_texture_ids_.begin();
       it != outstanding_texture_ids_.end(); ++it) {
    rendering_helper_->DeleteTexture(*it);
  }
  outstanding_texture_ids_.clear();
  // Cascade through the rest of the states so tests waiting on any of them
  // always unblock.
  for (int i = state_ + 1; i < CS_MAX; ++i)
    SetState(static_cast<ClientState>(i));
}
// For H.264, skips NALUs until the first SPS (nal_unit_type == 7), since
// decoding can only start at a config NALU; skipped NALUs are counted in
// |num_skipped_fragments_|.  For VP8 the whole stream is decodable, so this
// just delegates.  Returns the fragment bytes; on failure returns an empty
// string with *end_pos == start_pos.
std::string GLRenderingVDAClient::GetBytesForFirstFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    while (*end_pos + 4 < encoded_data_.size()) {
      if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7)  // SPS start frame
        return GetBytesForNextFragment(*end_pos, end_pos);
      GetBytesForNextNALU(*end_pos, end_pos);
      num_skipped_fragments_++;
    }
    *end_pos = start_pos;
    return std::string();  // Not found.
  }
  DCHECK_LE(profile_, media::VP8PROFILE_MAX);
  return GetBytesForNextFragment(start_pos, end_pos);
}

// Returns the bytes of the next decodable unit: one NALU for H.264, one IVF
// frame for VP8.
std::string GLRenderingVDAClient::GetBytesForNextFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    GetBytesForNextNALU(*end_pos, end_pos);
    if (start_pos != *end_pos) {
      num_queued_fragments_++;
    }
    return encoded_data_.substr(start_pos, *end_pos - start_pos);
  }
  DCHECK_LE(profile_, media::VP8PROFILE_MAX);
  return GetBytesForNextFrame(start_pos, end_pos);
}

// Advances *end_pos past the NALU starting at start_pos (which must begin
// with a 00 00 00 01 start code); *end_pos == start_pos means no room left.
void GLRenderingVDAClient::GetBytesForNextNALU(
    size_t start_pos, size_t* end_pos) {
  *end_pos = start_pos;
  if (*end_pos + 4 > encoded_data_.size())
    return;
  CHECK(LookingAtNAL(encoded_data_, start_pos));
  *end_pos += 4;
  // Scan for the next start code (or the end of the stream).
  while (*end_pos + 4 <= encoded_data_.size() &&
         !LookingAtNAL(encoded_data_, *end_pos)) {
    ++*end_pos;
  }
  if (*end_pos + 3 >= encoded_data_.size())
    *end_pos = encoded_data_.size();
}
// Returns the payload of the next VP8 frame in the IVF stream starting at
// |start_pos|, setting *end_pos to the first byte after it.
std::string GLRenderingVDAClient::GetBytesForNextFrame(
    size_t start_pos, size_t* end_pos) {
  std::string bytes;
  if (start_pos == 0)
    start_pos = 32;  // Skip the 32-byte IVF file header.
  *end_pos = start_pos;
  // The 12-byte IVF frame header starts with a little-endian uint32 payload
  // size.  Use memcpy rather than dereferencing a reinterpret_cast'ed
  // pointer: the offset is not guaranteed to be 4-byte aligned, and the
  // cast violates strict aliasing.
  uint32 frame_size;
  memcpy(&frame_size, &encoded_data_[*end_pos], sizeof(frame_size));
  *end_pos += 12;  // Skip the frame header.
  bytes.append(encoded_data_.substr(*end_pos, frame_size));
  *end_pos += frame_size;
  num_queued_fragments_++;
  return bytes;
}
// Returns true if the fragment is a point at which decoding can (re)start:
// an SPS NALU for H.264, or a VP8 keyframe (lowest bit of the first payload
// byte clear).
static bool FragmentHasConfigInfo(const uint8* data, size_t size,
                                  media::VideoCodecProfile profile) {
  if (profile >= media::H264PROFILE_MIN &&
      profile <= media::H264PROFILE_MAX) {
    media::H264Parser parser;
    parser.SetStream(data, size);
    media::H264NALU nalu;
    media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result != media::H264Parser::kOk) {
      // Let the VDA figure out there's something wrong with the stream.
      return false;
    }
    return nalu.nal_unit_type == media::H264NALU::kSPS;
  } else if (profile >= media::VP8PROFILE_MIN &&
             profile <= media::VP8PROFILE_MAX) {
    return (size > 0 && !(data[0] & 0x01));
  }
  LOG(FATAL) << "Invalid profile: " << profile;
  return false;
}

// Submits the next fragment of |encoded_data_| to the decoder, flushing when
// the stream is exhausted.  Also implements delete-after-N-decodes,
// reset-after-first-config-info, and (optionally) paced decode submission.
void GLRenderingVDAClient::DecodeNextFragment() {
  if (decoder_deleted())
    return;
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
    // End of stream: flush once all in-flight decodes have drained.
    if (outstanding_decodes_ == 0) {
      decoder_->Flush();
      SetState(CS_FLUSHING);
    }
    return;
  }
  size_t end_pos;
  std::string next_fragment_bytes;
  if (encoded_data_next_pos_to_decode_ == 0) {
    next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
  } else {
    next_fragment_bytes =
        GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
  }
  size_t next_fragment_size = next_fragment_bytes.size();
  // Call Reset() just after Decode() of the first config info to exercise
  // the decoder's handling of that race.
  bool reset_here = false;
  if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
    reset_here = FragmentHasConfigInfo(
        reinterpret_cast<const uint8*>(next_fragment_bytes.data()),
        next_fragment_size,
        profile_);
    if (reset_here)
      reset_after_frame_num_ = END_OF_STREAM_RESET;
  }
  // Hand the fragment to the decoder through shared memory, as a real
  // renderer would.
  base::SharedMemory shm;
  CHECK(shm.CreateAndMapAnonymous(next_fragment_size));
  memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
  base::SharedMemoryHandle dup_handle;
  CHECK(shm.ShareToProcess(base::Process::Current().handle(), &dup_handle));
  media::BitstreamBuffer bitstream_buffer(
      next_bitstream_buffer_id_, dup_handle, next_fragment_size);
  decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
  decoder_->Decode(bitstream_buffer);
  ++outstanding_decodes_;
  // Negative delete_decoder_state_ means "delete after N Decode() calls".
  if (!remaining_play_throughs_ &&
      -delete_decoder_state_ == next_bitstream_buffer_id_) {
    DeleteDecoder();
  }
  if (reset_here) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Restart from the beginning to re-Decode() the SPS we just sent.
    encoded_data_next_pos_to_decode_ = 0;
  } else {
    encoded_data_next_pos_to_decode_ = end_pos;
  }
  // Paced mode: schedule the next submission at a fixed rate instead of
  // waiting for NotifyEndOfBitstreamBuffer().
  if (decode_calls_per_second_ > 0) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
        base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
  }
}
// Returns the total number of decoded frames.  When a throttling client is
// interposed it sees every PictureReady() (including frames it later drops),
// so its count is authoritative.
int GLRenderingVDAClient::num_decoded_frames() {
  if (throttling_client_)
    return throttling_client_->num_decoded_frames();
  return num_decoded_frames_;
}
double GLRenderingVDAClient::frames_per_second() {
base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
if (delta.InSecondsF() == 0)
return 0;
return num_decoded_frames() / delta.InSecondsF();
}
// Returns the median per-buffer decode latency in milliseconds (averaging
// the two middle samples for an even count), or 0 if nothing was decoded.
// Sorts |decode_time_| in place, which is fine since stats are read at the
// end of the test.
int GLRenderingVDAClient::decode_time_median() {
  if (decode_time_.empty())
    return 0;
  std::sort(decode_time_.begin(), decode_time_.end());
  const size_t mid = decode_time_.size() / 2;
  if (decode_time_.size() % 2 == 0)
    return (decode_time_[mid - 1] + decode_time_[mid]).InMilliseconds() / 2;
  return decode_time_[mid].InMilliseconds();
}
// Test fixture: owns the parsed test videos, a RenderingHelper, and the
// dedicated rendering thread all decode clients run on.
class VideoDecodeAcceleratorTest : public ::testing::Test {
 protected:
  VideoDecodeAcceleratorTest();
  virtual void SetUp();
  virtual void TearDown();

  // Parses |data| (see g_test_video_data for the format) and reads each
  // file's contents into a heap-allocated TestVideoFile.
  void ParseAndReadTestVideoData(base::FilePath::StringType data,
                                 std::vector<TestVideoFile*>* test_video_files);

  // Adjusts per-file expectations (reset frame, minimum fps) for this test
  // instance's parameters.
  void UpdateTestVideoFileParams(
      size_t num_concurrent_decoders,
      int reset_point,
      std::vector<TestVideoFile*>* test_video_files);

  void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
  void CreateAndStartDecoder(GLRenderingVDAClient* client,
                             ClientStateNotification<ClientState>* note);
  void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
  void WaitUntilIdle();
  void OutputLogFile(const base::FilePath::CharType* log_path,
                     const std::string& content);

  std::vector<TestVideoFile*> test_video_files_;
  RenderingHelper rendering_helper_;
  scoped_refptr<base::MessageLoopProxy> rendering_loop_proxy_;

 private:
  base::Thread rendering_thread_;
  // Shadows the process-wide AtExitManager so per-test singletons are torn
  // down between tests.
  base::ShadowingAtExitManager at_exit_manager_;

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
};

VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest()
    : rendering_thread_("GLRenderingVDAClientThread") {}

void VideoDecodeAcceleratorTest::SetUp() {
  ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
  // Initialize the rendering thread.
  base::Thread::Options options;
  options.message_loop_type = base::MessageLoop::TYPE_DEFAULT;
#if defined(OS_WIN)
  // For windows the decoding thread initializes the media foundation decoder
  // which uses COM. We need the thread to be a UI thread.
  options.message_loop_type = base::MessageLoop::TYPE_UI;
#endif
  rendering_thread_.StartWithOptions(options);
  rendering_loop_proxy_ = rendering_thread_.message_loop_proxy();
}

void VideoDecodeAcceleratorTest::TearDown() {
  // Delete the test videos and uninitialize the helper on the rendering
  // thread, then wait for it to drain before stopping it.
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<std::vector<TestVideoFile*> >,
                 &test_video_files_));
  base::WaitableEvent done(false, false);
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RenderingHelper::UnInitialize,
                 base::Unretained(&rendering_helper_),
                 &done));
  done.Wait();
  rendering_thread_.Stop();
}
// Parses |data|, one or more ';'-separated entries of the form
// "name:width:height:numframes:numfragments:minFPSwithrender:
// minFPSnorender:profile" (every field after the name optional), reads each
// named file into memory, and appends a heap-allocated TestVideoFile per
// entry to |test_video_files|.  CHECK-fails on malformed entries or
// unreadable files.
void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
    base::FilePath::StringType data,
    std::vector<TestVideoFile*>* test_video_files) {
  std::vector<base::FilePath::StringType> entries;
  base::SplitString(data, ';', &entries);
  CHECK_GE(entries.size(), 1U) << data;
  for (size_t index = 0; index < entries.size(); ++index) {
    std::vector<base::FilePath::StringType> fields;
    base::SplitString(entries[index], ':', &fields);
    CHECK_GE(fields.size(), 1U) << entries[index];
    CHECK_LE(fields.size(), 8U) << entries[index];
    TestVideoFile* video_file = new TestVideoFile(fields[0]);
    // Guard every access with a size check: an entry may legally have fewer
    // than 8 fields, and the previous code unconditionally indexed
    // fields[1..7], reading out of bounds in that case.
    if (fields.size() >= 2 && !fields[1].empty())
      CHECK(base::StringToInt(fields[1], &video_file->width));
    if (fields.size() >= 3 && !fields[2].empty())
      CHECK(base::StringToInt(fields[2], &video_file->height));
    if (fields.size() >= 4 && !fields[3].empty())
      CHECK(base::StringToInt(fields[3], &video_file->num_frames));
    if (fields.size() >= 5 && !fields[4].empty())
      CHECK(base::StringToInt(fields[4], &video_file->num_fragments));
    if (fields.size() >= 6 && !fields[5].empty())
      CHECK(base::StringToInt(fields[5], &video_file->min_fps_render));
    if (fields.size() >= 7 && !fields[6].empty())
      CHECK(base::StringToInt(fields[6], &video_file->min_fps_no_render));
    int profile = -1;
    if (fields.size() >= 8 && !fields[7].empty())
      CHECK(base::StringToInt(fields[7], &profile));
    video_file->profile = static_cast<media::VideoCodecProfile>(profile);
    base::FilePath filepath(video_file->file_name);
    CHECK(base::ReadFileToString(filepath, &video_file->data_str))
        << "test_video_file: " << filepath.MaybeAsASCII();
    test_video_files->push_back(video_file);
  }
}
// Rewrites per-file expectations for this test instance: chooses the frame
// to reset at and scales minimum-fps expectations down by the number of
// decoders sharing the hardware.
void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
    size_t num_concurrent_decoders,
    int reset_point,
    std::vector<TestVideoFile*>* test_video_files) {
  for (size_t i = 0; i < test_video_files->size(); i++) {
    TestVideoFile* video_file = (*test_video_files)[i];
    if (reset_point != MID_STREAM_RESET) {
      video_file->reset_after_frame_num = reset_point;
    } else {
      // Reset at kMaxResetAfterFrameNum, or halfway for shorter streams.
      int reset_after = kMaxResetAfterFrameNum;
      if (video_file->num_frames <= reset_after)
        reset_after = video_file->num_frames / 2;
      video_file->reset_after_frame_num = reset_after;
      // Frames decoded before the reset are decoded a second time after it.
      video_file->num_frames += reset_after;
    }
    if (video_file->min_fps_render != -1)
      video_file->min_fps_render /= num_concurrent_decoders;
    if (video_file->min_fps_no_render != -1)
      video_file->min_fps_no_render /= num_concurrent_decoders;
  }
}
// Initializes the rendering helper on the rendering thread and blocks until
// it is ready.
void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
    const RenderingHelperParams& helper_params) {
  base::WaitableEvent done(false, false);
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RenderingHelper::Initialize,
                 base::Unretained(&rendering_helper_),
                 helper_params,
                 &done));
  done.Wait();
}

// Starts |client|'s decoder on the rendering thread and waits for it to
// report CS_DECODER_SET.
void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
    GLRenderingVDAClient* client,
    ClientStateNotification<ClientState>* note) {
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
                 base::Unretained(client)));
  ASSERT_EQ(note->Wait(), CS_DECODER_SET);
}

// Consumes state notifications until CS_DESTROYED; the loop is bounded by
// CS_MAX since a client can emit each state at most once per teardown.
void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
    ClientStateNotification<ClientState>* note) {
  for (int i = 0; i < CS_MAX; i++) {
    if (note->Wait() == CS_DESTROYED)
      break;
  }
}

// Blocks until everything already queued on the rendering thread has run.
void VideoDecodeAcceleratorTest::WaitUntilIdle() {
  base::WaitableEvent done(false, false);
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
  done.Wait();
}

// Writes |content| to |log_path|, truncating any existing file.
void VideoDecodeAcceleratorTest::OutputLogFile(
    const base::FilePath::CharType* log_path,
    const std::string& content) {
  base::File file(base::FilePath(log_path),
                  base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
  file.WriteAtCurrentPos(content.data(), content.length());
}
// Parameterized fixture.  Tuple fields, in order: number of concurrent
// decoders, number of in-flight decodes, number of play-throughs, reset
// point, state at which to delete the decoder, delay-buffer-reuse flag,
// render-as-thumbnails flag.
class VideoDecodeAcceleratorParamTest
    : public VideoDecodeAcceleratorTest,
      public ::testing::WithParamInterface<
          Tuple7<int, int, int, ResetPoint, ClientState, bool, bool> > {
};
// Pretty-printer for the test parameter tuple, used by gtest when reporting
// parameterized-test failures.
::std::ostream& operator<<(
    ::std::ostream& os,
    const Tuple7<int, int, int, ResetPoint, ClientState, bool, bool>& t) {
  os << t.a << ", " << t.b << ", " << t.c << ", " << t.d;
  os << ", " << t.e << ", " << t.f << ", " << t.g;
  return os;
}
// Waits for the next state notification and asserts that it is
// |expected_state|, OR that the client's decoder has already been deleted —
// a deliberately early-deleted decoder may legitimately stop emitting the
// remaining lifecycle states.
static void AssertWaitForStateOrDeleted(
    ClientStateNotification<ClientState>* note,
    GLRenderingVDAClient* client,
    ClientState expected_state) {
  const ClientState actual_state = note->Wait();
  if (actual_state == expected_state)
    return;
  ASSERT_TRUE(client->decoder_deleted())
      << "Decoder not deleted but Wait() returned " << actual_state
      << ", instead of " << expected_state;
}
// Minimum number of concurrent decoders expected to work: TestSimpleDecode
// only tolerates decoder-initialization failure when more than this many
// decoders are running at once (see the ASSERT_GT in that test).
enum { kMinSupportedNumConcurrentDecoders = 3 };
// Decodes the test stream(s) end-to-end while exercising the configuration
// selected by the Tuple7 parameter (decoder count, in-flight decodes,
// play-throughs, reset point, deletion state, reuse delay, thumbnailing),
// then checks lifecycle ordering, frame/fragment accounting, fps floors,
// and (in thumbnail mode) golden MD5s.
TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
  const size_t num_concurrent_decoders = GetParam().a;
  const size_t num_in_flight_decodes = GetParam().b;
  const int num_play_throughs = GetParam().c;
  const int reset_point = GetParam().d;
  const int delete_decoder_state = GetParam().e;
  bool test_reuse_delay = GetParam().f;
  const bool render_as_thumbnails = GetParam().g;

  UpdateTestVideoFileParams(
      num_concurrent_decoders, reset_point, &test_video_files_);

  // Rendering can be suppressed globally via the command line.
  const bool suppress_rendering = g_disable_rendering;

  std::vector<ClientStateNotification<ClientState>*>
      notes(num_concurrent_decoders, NULL);
  std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);

  RenderingHelperParams helper_params;
  helper_params.num_windows = num_concurrent_decoders;
  helper_params.render_as_thumbnails = render_as_thumbnails;
  if (render_as_thumbnails) {
    // Thumbnail mode is only wired up for a single decoder.
    CHECK_EQ(num_concurrent_decoders, 1U);
    gfx::Size frame_size(test_video_files_[0]->width,
                         test_video_files_[0]->height);
    helper_params.frame_dimensions.push_back(frame_size);
    helper_params.window_dimensions.push_back(kThumbnailsDisplaySize);
    helper_params.thumbnails_page_size = kThumbnailsPageSize;
    helper_params.thumbnail_size = kThumbnailSize;
  } else {
    // One window per test video, sized to the video's own dimensions.
    for (size_t index = 0; index < test_video_files_.size(); ++index) {
      gfx::Size frame_size(test_video_files_[index]->width,
                           test_video_files_[index]->height);
      helper_params.frame_dimensions.push_back(frame_size);
      helper_params.window_dimensions.push_back(frame_size);
    }
  }
  InitializeRenderingHelper(helper_params);

  // Create and start one client per concurrent decoder; test videos are
  // assigned round-robin when there are fewer files than decoders.
  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    TestVideoFile* video_file =
        test_video_files_[index % test_video_files_.size()];
    ClientStateNotification<ClientState>* note =
        new ClientStateNotification<ClientState>();
    notes[index] = note;

    // When testing delayed reuse, delay ReusePictureBuffer near the end of
    // the stream — but only if the stream is long enough for that to be
    // meaningful (more than twice kMaxFramesToDelayReuse frames).
    int delay_after_frame_num = std::numeric_limits<int>::max();
    if (test_reuse_delay &&
        kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
      delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
    }

    GLRenderingVDAClient* client =
        new GLRenderingVDAClient(&rendering_helper_,
                                 index,
                                 note,
                                 video_file->data_str,
                                 num_in_flight_decodes,
                                 num_play_throughs,
                                 video_file->reset_after_frame_num,
                                 delete_decoder_state,
                                 video_file->width,
                                 video_file->height,
                                 video_file->profile,
                                 g_rendering_fps,
                                 suppress_rendering,
                                 delay_after_frame_num,
                                 0);  // decode_calls_per_second: presumably
                                      // unthrottled here (cf. the TEST_F
                                      // below, which passes a real rate).
    clients[index] = client;
    CreateAndStartDecoder(client, note);
  }

  // Multiple play-throughs repeat parts of the stream, so the per-stream
  // accounting checks below would not hold; skip them in that case.
  bool skip_performance_and_correctness_checks = num_play_throughs > 1;
  for (size_t i = 0; i < num_concurrent_decoders; ++i) {
    ClientStateNotification<ClientState>* note = notes[i];
    ClientState state = note->Wait();
    if (state != CS_INITIALIZED) {
      skip_performance_and_correctness_checks = true;
      // Initialization failure is only acceptable when we are over the
      // minimum number of decoders every platform must support.
      ASSERT_GT(num_concurrent_decoders,
                static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
      continue;
    }
    ASSERT_EQ(state, CS_INITIALIZED);
    // Walk the client through the expected per-play-through lifecycle:
    // (INITIALIZED on replays) -> FLUSHING -> FLUSHED -> RESETTING, then
    // RESET -> DESTROYED once at the end.  A deliberately early-deleted
    // decoder may drop out at any step (AssertWaitForStateOrDeleted).
    for (int n = 0; n < num_play_throughs; ++n) {
      if (n > 0) {
        ASSERT_NO_FATAL_FAILURE(
            AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
      }
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
    }
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
  }

  // Correctness/performance checks: only meaningful if the decoder ran at
  // least to the flushed state and nothing above forced a skip.
  for (size_t i = 0; i < num_concurrent_decoders &&
           !skip_performance_and_correctness_checks; ++i) {
    if (delete_decoder_state < CS_FLUSHED)
      continue;
    GLRenderingVDAClient* client = clients[i];
    TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
    if (video_file->num_frames > 0) {
      // A mid-stream reset replays part of the stream, so the client may
      // decode more than the nominal frame count; otherwise expect an exact
      // match.
      if (video_file->reset_after_frame_num > 0)
        EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
      else
        EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
    }
    if (reset_point == END_OF_STREAM_RESET) {
      // Every fragment must be accounted for as either skipped or queued,
      // and every queued fragment must come back as a done bitstream buffer.
      EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
                client->num_queued_fragments());
      EXPECT_EQ(client->num_done_bitstream_buffers(),
                client->num_queued_fragments());
    }
    VLOG(0) << "Decoder " << i << " fps: " << client->frames_per_second();
    if (!render_as_thumbnails) {
      int min_fps = suppress_rendering ?
          video_file->min_fps_no_render : video_file->min_fps_render;
      // Delayed buffer reuse intentionally slows decoding, so the fps floor
      // does not apply in that configuration.
      if (min_fps > 0 && !test_reuse_delay)
        EXPECT_GT(client->frames_per_second(), min_fps);
    }
  }

  if (render_as_thumbnails) {
    // Fetch the rendered thumbnail page from the rendering thread and
    // compare its MD5 against the golden values for this video.
    std::vector<unsigned char> rgb;
    bool alpha_solid;
    base::WaitableEvent done(false, false);
    rendering_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
                   base::Unretained(&rendering_helper_),
                   &rgb, &alpha_solid, &done));
    done.Wait();

    std::vector<std::string> golden_md5s;
    std::string md5_string = base::MD5String(
        base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
    ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
    std::vector<std::string>::iterator match =
        find(golden_md5s.begin(), golden_md5s.end(), md5_string);
    if (match == golden_md5s.end()) {
      // On mismatch, dump the page as <video>.bad_thumbnails.png so the
      // failure can be inspected (and a new golden MD5 derived if correct).
      std::vector<unsigned char> png;
      gfx::PNGCodec::Encode(&rgb[0],
                            gfx::PNGCodec::FORMAT_RGB,
                            kThumbnailsPageSize,
                            kThumbnailsPageSize.width() * 3,  // RGB stride
                            true,
                            std::vector<gfx::PNGCodec::Comment>(),
                            &png);

      LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;

      base::FilePath filepath(test_video_files_[0]->file_name);
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
      int num_bytes = base::WriteFile(filepath,
                                      reinterpret_cast<char*>(&png[0]),
                                      png.size());
      ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
    }
    ASSERT_NE(match, golden_md5s.end());
    EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
  }

  // Output per-frame delivery times if requested, but only when the
  // decoders were allowed to run to at least the flushed state.
  if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
    base::File output_file(
        base::FilePath(g_output_log),
        base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
    for (size_t i = 0; i < num_concurrent_decoders; ++i) {
      clients[i]->OutputFrameDeliveryTimes(&output_file);
    }
  }

  // Delete clients and notifications on the rendering thread (where their
  // tasks run); WaitUntilIdle() fences the deletions before tear-down.
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*> >,
                 &clients));
  rendering_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<
                     std::vector<ClientStateNotification<ClientState>*> >,
                 &notes));
  WaitUntilIdle();
};  // NOTE(review): stray ';' after the TEST_P body — harmless but unneeded.
// Test that replay after reaching end-of-stream works (4 play-throughs).
INSTANTIATE_TEST_CASE_P(
    ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Test that Reset() issued at the start of the stream works.
INSTANTIATE_TEST_CASE_P(
    ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));

// Test that Reset() immediately after the first config-info fragment works.
INSTANTIATE_TEST_CASE_P(
    ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(
            1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));

// Test that Reset() in the middle of the stream works.
INSTANTIATE_TEST_CASE_P(
    MidStreamReset, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));

// Test decoding while picture-buffer reuse is deliberately delayed
// (simulates a slow renderer holding on to buffers).
INSTANTIATE_TEST_CASE_P(
    SlowRendering, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));

// Test that deleting the decoder at each point of its lifecycle is safe.
// The negative ClientState values are out-of-enum sentinels — presumably
// interpreted by GLRenderingVDAClient as "delete after N Decode() calls";
// confirm against the client's constructor documentation.
INSTANTIATE_TEST_CASE_P(
    TearDownTiming, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-1), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-10), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-100), false, false)));

// Test varying the number of in-flight Decode() requests per decoder.
INSTANTIATE_TEST_CASE_P(
    DecodeVariations, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Run at and just above the minimum supported concurrent-decoder count;
// beyond the minimum, initialization failures are tolerated (resource
// exhaustion), per the ASSERT_GT in TestSimpleDecode.
INSTANTIATE_TEST_CASE_P(
    ResourceExhaustion, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false)));

// Thumbnailing test: render decoded frames as thumbnails and compare the
// page against golden MD5s.
INSTANTIATE_TEST_CASE_P(
    Thumbnail, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
// Measures the median decode time while Decode() calls are throttled to
// kWebRtcDecodeCallsPerSecond (a WebRTC-style usage pattern), and logs the
// result (in ms, per the output format string) when --output_log is given.
TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
  // Single full-size window, no thumbnails.
  RenderingHelperParams helper_params;
  helper_params.num_windows = 1;
  helper_params.render_as_thumbnails = false;
  gfx::Size frame_size(test_video_files_[0]->width,
                       test_video_files_[0]->height);
  helper_params.frame_dimensions.push_back(frame_size);
  helper_params.window_dimensions.push_back(frame_size);
  InitializeRenderingHelper(helper_params);

  ClientStateNotification<ClientState>* note =
      new ClientStateNotification<ClientState>();
  // Positional arguments mirror the GLRenderingVDAClient construction in
  // TestSimpleDecode above; see the annotations there.
  GLRenderingVDAClient* client =
      new GLRenderingVDAClient(&rendering_helper_,
                               0,  // index / window id
                               note,
                               test_video_files_[0]->data_str,
                               1,  // num_in_flight_decodes
                               1,  // num_play_throughs
                               test_video_files_[0]->reset_after_frame_num,
                               CS_RESET,  // delete_decoder_state
                               test_video_files_[0]->width,
                               test_video_files_[0]->height,
                               test_video_files_[0]->profile,
                               g_rendering_fps,
                               true,  // suppress rendering
                               std::numeric_limits<int>::max(),  // no delayed
                                                                 // buffer reuse
                               kWebRtcDecodeCallsPerSecond);
  CreateAndStartDecoder(client, note);
  // Block until the client reports CS_DESTROYED.
  WaitUntilDecodeFinish(note);

  int decode_time_median = client->decode_time_median();
  std::string output_string =
      base::StringPrintf("Decode time median: %d ms", decode_time_median);
  VLOG(0) << output_string;
  ASSERT_GT(decode_time_median, 0);

  if (g_output_log != NULL)
    OutputLogFile(g_output_log, output_string);

  // The client and notification are used on the rendering thread, so delete
  // them there; WaitUntilIdle() fences the deletions.
  rendering_loop_proxy_->DeleteSoon(FROM_HERE, client);
  rendering_loop_proxy_->DeleteSoon(FROM_HERE, note);
  WaitUntilIdle();
};  // NOTE(review): stray ';' after the TEST_F body — harmless but unneeded.
}
}
int main(int argc, char **argv) {
testing::InitGoogleTest(&argc, argv);
CommandLine::Init(argc, argv);
logging::LoggingSettings settings;
settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
CHECK(logging::InitLogging(settings));
CommandLine* cmd_line = CommandLine::ForCurrentProcess();
DCHECK(cmd_line);
CommandLine::SwitchMap switches = cmd_line->GetSwitches();
for (CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
if (it->first == "test_video_data") {
content::g_test_video_data = it->second.c_str();
continue;
}
if (it->first == "frame_delivery_log" || it->first == "output_log") {
content::g_output_log = it->second.c_str();
continue;
}
if (it->first == "rendering_fps") {
std::string input(it->second.begin(), it->second.end());
CHECK(base::StringToDouble(input, &content::g_rendering_fps));
continue;
}
if (it->first == "disable_rendering") {
content::g_disable_rendering = true;
continue;
}
if (it->first == "v" || it->first == "vmodule")
continue;
LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
}
base::ShadowingAtExitManager at_exit_manager;
return RUN_ALL_TESTS();
}