This source file includes the following definitions:
- SetUpCommandLine
- IN_PROC_BROWSER_TEST_F
- IN_PROC_BROWSER_TEST_F
- IN_PROC_BROWSER_TEST_F
- IN_PROC_BROWSER_TEST_F
- current_tone_frequency_
- WaitForColorAndTone
- NotifyOnceMatched
- NotifyIfMatched
- OnAudioFrame
- OnVideoFrame
- UpdateExponentialMovingAverage
- ComputeMedianIntensityInPlane
- SetUp
- SetUpCommandLine
- IN_PROC_BROWSER_TEST_F
- IN_PROC_BROWSER_TEST_F
#include <algorithm>
#include <cmath>
#include "base/command_line.h"
#include "base/float_util.h"
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "base/synchronization/lock.h"
#include "base/time/time.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/common/chrome_switches.h"
#include "content/public/common/content_switches.h"
#include "extensions/common/switches.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_environment.h"
#include "media/cast/test/utility/audio_utility.h"
#include "media/cast/test/utility/default_config.h"
#include "media/cast/test/utility/in_process_receiver.h"
#include "media/cast/test/utility/standalone_cast_environment.h"
#include "net/base/net_errors.h"
#include "net/base/net_util.h"
#include "net/base/rand_callback.h"
#include "net/udp/udp_socket.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace extensions {
// Browser-test fixture for the chrome.cast.streaming.* extension APIs.
// Whitelists the test extension's ID so the cast streaming APIs are exposed
// to it.
class CastStreamingApiTest : public ExtensionApiTest {
 public:
  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    ExtensionApiTest::SetUpCommandLine(command_line);
    // ID of the cast_streaming test extension under chrome/test/data.
    const char kTestExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf";
    command_line->AppendSwitchASCII(
        extensions::switches::kWhitelistedExtensionID, kTestExtensionId);
  }
};
// Runs the extension's basics.html subtest; on failure, reports the message
// produced by the extension's chrome.test framework.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html")) << message_;
}
// Runs the extension's stats.html subtest; on failure, reports the message
// produced by the extension's chrome.test framework.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Stats) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "stats.html")) << message_;
}
// Runs the extension's bad_logging.html subtest; on failure, reports the
// message produced by the extension's chrome.test framework.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"))
<< message_;
}
// Runs the extension's destination_not_set.html subtest; on failure, reports
// the message produced by the extension's chrome.test framework.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, DestinationNotSet) {
ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "destination_not_set.html"))
<< message_;
}
namespace {
class TestPatternReceiver : public media::cast::InProcessReceiver {
public:
explicit TestPatternReceiver(
const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
const net::IPEndPoint& local_end_point)
: InProcessReceiver(cast_environment,
local_end_point,
net::IPEndPoint(),
media::cast::GetDefaultAudioReceiverConfig(),
media::cast::GetDefaultVideoReceiverConfig()),
target_tone_frequency_(0),
current_tone_frequency_(0.0f) {
memset(&target_color_, 0, sizeof(target_color_));
memset(¤t_color_, 0, sizeof(current_color_));
}
virtual ~TestPatternReceiver() {}
void WaitForColorAndTone(const uint8 yuv_color[3],
int tone_frequency,
base::TimeDelta duration) {
LOG(INFO) << "Waiting for test pattern: color=yuv("
<< static_cast<int>(yuv_color[0]) << ", "
<< static_cast<int>(yuv_color[1]) << ", "
<< static_cast<int>(yuv_color[2])
<< "), tone_frequency=" << tone_frequency << " Hz";
base::RunLoop run_loop;
cast_env()->PostTask(
media::cast::CastEnvironment::MAIN,
FROM_HERE,
base::Bind(&TestPatternReceiver::NotifyOnceMatched,
base::Unretained(this),
yuv_color,
tone_frequency,
duration,
media::BindToCurrentLoop(run_loop.QuitClosure())));
run_loop.Run();
}
private:
void NotifyOnceMatched(const uint8 yuv_color[3],
int tone_frequency,
base::TimeDelta match_duration,
const base::Closure& matched_callback) {
DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
match_duration_ = match_duration;
matched_callback_ = matched_callback;
target_color_[0] = yuv_color[0];
target_color_[1] = yuv_color[1];
target_color_[2] = yuv_color[2];
target_tone_frequency_ = tone_frequency;
first_time_near_target_color_ = base::TimeTicks();
first_time_near_target_tone_ = base::TimeTicks();
}
void NotifyIfMatched() {
DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
if (first_time_near_target_color_.is_null() ||
false)
return;
const base::TimeTicks now = cast_env()->Clock()->NowTicks();
if ((now - first_time_near_target_color_) >= match_duration_ &&
true) {
matched_callback_.Run();
}
}
virtual void OnAudioFrame(scoped_ptr<media::cast::PcmAudioFrame> audio_frame,
const base::TimeTicks& playout_time) OVERRIDE {
DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
if (audio_frame->samples.empty()) {
NOTREACHED() << "OnAudioFrame called with no samples?!?";
return;
}
const int crossings = media::cast::CountZeroCrossings(audio_frame->samples);
const float seconds_per_frame = audio_frame->samples.size() /
static_cast<float>(audio_frame->frequency);
const float frequency_in_frame = crossings / seconds_per_frame;
const float kAveragingWeight = 0.1f;
UpdateExponentialMovingAverage(
kAveragingWeight, frequency_in_frame, ¤t_tone_frequency_);
VLOG(1) << "Current audio tone frequency: " << current_tone_frequency_;
const float kTargetWindowHz = 20;
if (fabsf(current_tone_frequency_ - target_tone_frequency_) <
kTargetWindowHz) {
if (first_time_near_target_tone_.is_null())
first_time_near_target_tone_ = cast_env()->Clock()->NowTicks();
NotifyIfMatched();
} else {
first_time_near_target_tone_ = base::TimeTicks();
}
}
virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
const base::TimeTicks& render_time) OVERRIDE {
DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
CHECK(video_frame->format() == media::VideoFrame::YV12 ||
video_frame->format() == media::VideoFrame::I420 ||
video_frame->format() == media::VideoFrame::YV12A);
const int kPlanes[] = {media::VideoFrame::kYPlane,
media::VideoFrame::kUPlane,
media::VideoFrame::kVPlane};
for (size_t i = 0; i < arraysize(kPlanes); ++i) {
current_color_[i] =
ComputeMedianIntensityInPlane(video_frame->row_bytes(kPlanes[i]),
video_frame->rows(kPlanes[i]),
video_frame->stride(kPlanes[i]),
video_frame->data(kPlanes[i]));
}
VLOG(1) << "Current video color: yuv(" << current_color_[0] << ", "
<< current_color_[1] << ", " << current_color_[2] << ')';
const float kTargetWindow = 10.0f;
if (fabsf(current_color_[0] - target_color_[0]) < kTargetWindow &&
fabsf(current_color_[1] - target_color_[1]) < kTargetWindow &&
fabsf(current_color_[2] - target_color_[2]) < kTargetWindow) {
if (first_time_near_target_color_.is_null())
first_time_near_target_color_ = cast_env()->Clock()->NowTicks();
NotifyIfMatched();
} else {
first_time_near_target_color_ = base::TimeTicks();
}
}
static void UpdateExponentialMovingAverage(float weight,
float sample_value,
float* average) {
*average = weight * sample_value + (1.0f - weight) * (*average);
CHECK(base::IsFinite(*average));
}
static uint8 ComputeMedianIntensityInPlane(int width,
int height,
int stride,
uint8* data) {
const int num_pixels = width * height;
if (num_pixels <= 0)
return 0;
if (width < stride) {
for (int y = 1; y < height; ++y) {
uint8* const src = data + y * stride;
uint8* const dest = data + y * width;
memmove(dest, src, width);
}
}
const size_t middle_idx = num_pixels / 2;
std::nth_element(data, data + middle_idx, data + num_pixels);
return data[middle_idx];
}
base::TimeDelta match_duration_;
base::Closure matched_callback_;
float target_color_[3];
float target_tone_frequency_;
float current_color_[3];
base::TimeTicks first_time_near_target_color_;
float current_tone_frequency_;
base::TimeTicks first_time_near_target_tone_;
DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
};
}
// Fixture variant that enables real pixel output (so tab content is actually
// rendered and capturable) in a small fixed-size browser window.
class CastStreamingApiTestWithPixelOutput : public CastStreamingApiTest {
  virtual void SetUp() OVERRIDE {
    EnablePixelOutput();
    CastStreamingApiTest::SetUp();
  }

  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
    // Keep the window small to minimize the capture/encode workload.
    const char kSmallWindowSize[] = "128,128";
    command_line->AppendSwitchASCII(::switches::kWindowSize, kSmallWindowSize);
    CastStreamingApiTest::SetUpCommandLine(command_line);
  }
};
// Streams tab content from a sender page to an in-process Cast receiver and
// confirms the expected color/tone test patterns arrive.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, DISABLED_EndToEnd) {
  // Determine a currently-unused UDP port for the receiver to listen on:
  // bind a UDP socket on port 0 and read back the OS-assigned port.
  net::IPAddressNumber localhost;
  localhost.push_back(127);
  localhost.push_back(0);
  localhost.push_back(0);
  localhost.push_back(1);
  scoped_ptr<net::UDPSocket> receive_socket(
      new net::UDPSocket(net::DatagramSocket::DEFAULT_BIND,
                         net::RandIntCallback(),
                         NULL,
                         net::NetLog::Source()));
  receive_socket->AllowAddressReuse();
  ASSERT_EQ(net::OK, receive_socket->Bind(net::IPEndPoint(localhost, 0)));
  net::IPEndPoint receiver_end_point;
  ASSERT_EQ(net::OK, receive_socket->GetLocalAddress(&receiver_end_point));
  // NOTE: Inherently racy -- another process could grab the port between
  // this close and the receiver's bind below.
  receive_socket.reset();

  // Start the in-process receiver that examines the received audio/video for
  // the expected test patterns.
  const scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
      new media::cast::StandaloneCastEnvironment());
  // NOTE(review): |receiver| appears to be leaked (never deleted/stopped
  // before Shutdown()) -- confirm whether InProcessReceiver manages its own
  // lifetime here.
  TestPatternReceiver* const receiver =
      new TestPatternReceiver(cast_environment, receiver_end_point);
  receiver->Start();

  // Load the sender page, which captures its own content and streams it to
  // the receiver's port via the cast.streaming APIs.
  const std::string page_url = base::StringPrintf(
      "end_to_end_sender.html?port=%d", receiver_end_point.port());
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;

  // Wait for each expected color/tone pair.  These values must match the
  // patterns generated by end_to_end_sender.html.
  const uint8 kRedInYUV[3] = {82, 90, 240};
  const uint8 kGreenInYUV[3] = {145, 54, 34};
  const uint8 kBlueInYUV[3] = {41, 240, 110};
  const base::TimeDelta kOneHalfSecond = base::TimeDelta::FromMilliseconds(500);
  receiver->WaitForColorAndTone(kRedInYUV, 200 /* Hz */, kOneHalfSecond);
  receiver->WaitForColorAndTone(kGreenInYUV, 500 /* Hz */, kOneHalfSecond);
  receiver->WaitForColorAndTone(kBlueInYUV, 1800 /* Hz */, kOneHalfSecond);

  cast_environment->Shutdown();
}
// Runs the extension's rtp_stream_error.html subtest.  Appends |message_| on
// failure, matching every other subtest assertion in this file.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, RtpStreamError) {
  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "rtp_stream_error.html"))
      << message_;
}
}