This source file includes following definitions.
- DefaultOpusPayload
- DefaultVp8Payload
- DefaultH264Payload
- IsHardwareVP8EncodingSupported
- IsHardwareH264EncodingSupported
- SupportedAudioParams
- SupportedVideoParams
- ToAudioSenderConfig
- ToVideoSenderConfig
- error_callback_
- OnVideoFrame
- AddToTrack
- output_sample_rate_
- OnData
- ResampleData
- OnSetFormat
- AddToTrack
- ProvideData
- height
- weak_factory_
- GetSupportedParams
- GetParams
- Start
- Stop
- ToggleLogging
- GetRawEvents
- GetStats
- IsAudio
- DidEncounterError
#include "chrome/renderer/media/cast_rtp_stream.h"
#include "base/bind.h"
#include "base/debug/trace_event.h"
#include "base/logging.h"
#include "base/memory/weak_ptr.h"
#include "chrome/renderer/media/cast_session.h"
#include "chrome/renderer/media/cast_udp_transport.h"
#include "content/public/renderer/media_stream_audio_sink.h"
#include "content/public/renderer/media_stream_video_sink.h"
#include "content/public/renderer/render_thread.h"
#include "content/public/renderer/video_encode_accelerator.h"
#include "media/audio/audio_parameters.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_fifo.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/multi_channel_resampler.h"
#include "media/base/video_frame.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_defines.h"
#include "media/cast/cast_sender.h"
#include "media/cast/transport/cast_transport_config.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "ui/gfx/geometry/size.h"
using media::cast::AudioSenderConfig;
using media::cast::VideoSenderConfig;
namespace {
const char kCodecNameOpus[] = "OPUS";
const char kCodecNameVp8[] = "VP8";
const char kCodecNameH264[] = "H264";
const int kBitrateMultiplier = 1000;
const int kBufferAudioData = 2;
// Builds the default payload description for Opus audio.
CastRtpPayloadParams DefaultOpusPayload() {
  CastRtpPayloadParams p;
  p.payload_type = 127;
  p.ssrc = 1;
  p.feedback_ssrc = 2;
  p.codec_name = kCodecNameOpus;
  p.clock_rate = 48000;
  p.channels = 2;
  // Audio uses a fixed bitrate, so min and max are identical.
  p.min_bitrate = media::cast::kDefaultAudioEncoderBitrate;
  p.max_bitrate = media::cast::kDefaultAudioEncoderBitrate;
  return p;
}
// Builds the default payload description for VP8 video (720p).
CastRtpPayloadParams DefaultVp8Payload() {
  CastRtpPayloadParams p;
  p.payload_type = 96;
  p.ssrc = 11;
  p.feedback_ssrc = 12;
  p.codec_name = kCodecNameVp8;
  p.clock_rate = 90000;
  p.width = 1280;
  p.height = 720;
  // Bitrates are in kbps here; converted via kBitrateMultiplier later.
  p.min_bitrate = 50;
  p.max_bitrate = 2000;
  return p;
}
// Builds the default payload description for H.264 video (720p).
// Mirrors DefaultVp8Payload() except for the codec name.
CastRtpPayloadParams DefaultH264Payload() {
  CastRtpPayloadParams p;
  p.payload_type = 96;
  p.ssrc = 11;
  p.feedback_ssrc = 12;
  p.codec_name = kCodecNameH264;
  p.clock_rate = 90000;
  p.width = 1280;
  p.height = 720;
  // Bitrates are in kbps here; converted via kBitrateMultiplier later.
  p.min_bitrate = 50;
  p.max_bitrate = 2000;
  return p;
}
bool IsHardwareVP8EncodingSupported() {
std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
content::GetSupportedVideoEncodeAcceleratorProfiles();
for (size_t i = 0; i < vea_profiles.size(); ++i) {
if (vea_profiles[i].profile >= media::VP8PROFILE_MIN &&
vea_profiles[i].profile <= media::VP8PROFILE_MAX) {
return true;
}
}
return false;
}
bool IsHardwareH264EncodingSupported() {
std::vector<media::VideoEncodeAccelerator::SupportedProfile> vea_profiles =
content::GetSupportedVideoEncodeAcceleratorProfiles();
for (size_t i = 0; i < vea_profiles.size(); ++i) {
if (vea_profiles[i].profile >= media::H264PROFILE_MIN &&
vea_profiles[i].profile <= media::H264PROFILE_MAX) {
return true;
}
}
return false;
}
// Lists the audio RTP parameter sets offered to callers. Opus is the
// only supported audio codec.
std::vector<CastRtpParams> SupportedAudioParams() {
  std::vector<CastRtpParams> result;
  result.push_back(CastRtpParams(DefaultOpusPayload()));
  return result;
}
// Lists the video RTP parameter sets offered to callers. H.264 is
// listed first when a hardware encoder is available; VP8 is always
// offered.
std::vector<CastRtpParams> SupportedVideoParams() {
  std::vector<CastRtpParams> result;
  if (IsHardwareH264EncodingSupported())
    result.push_back(CastRtpParams(DefaultH264Payload()));
  result.push_back(CastRtpParams(DefaultVp8Payload()));
  return result;
}
bool ToAudioSenderConfig(const CastRtpParams& params,
AudioSenderConfig* config) {
config->sender_ssrc = params.payload.ssrc;
config->incoming_feedback_ssrc = params.payload.feedback_ssrc;
config->rtp_config.payload_type = params.payload.payload_type;
config->use_external_encoder = false;
config->frequency = params.payload.clock_rate;
config->channels = params.payload.channels;
config->bitrate = params.payload.max_bitrate * kBitrateMultiplier;
config->codec = media::cast::transport::kPcm16;
if (params.payload.codec_name == kCodecNameOpus)
config->codec = media::cast::transport::kOpus;
else
return false;
return true;
}
bool ToVideoSenderConfig(const CastRtpParams& params,
VideoSenderConfig* config) {
config->sender_ssrc = params.payload.ssrc;
config->incoming_feedback_ssrc = params.payload.feedback_ssrc;
config->rtp_config.payload_type = params.payload.payload_type;
config->use_external_encoder = false;
config->width = params.payload.width;
config->height = params.payload.height;
config->min_bitrate = config->start_bitrate =
params.payload.min_bitrate * kBitrateMultiplier;
config->max_bitrate = params.payload.max_bitrate * kBitrateMultiplier;
if (params.payload.codec_name == kCodecNameVp8) {
config->use_external_encoder = IsHardwareVP8EncodingSupported();
config->codec = media::cast::transport::kVp8;
} else if (params.payload.codec_name == kCodecNameH264) {
config->use_external_encoder = IsHardwareH264EncodingSupported();
config->codec = media::cast::transport::kH264;
} else {
return false;
}
return true;
}
}
// Receives video frames from a WebMediaStreamTrack and forwards them to
// the Cast sender's VideoFrameInput. AddToTrack() must be called before
// any frames can be forwarded.
//
// Fix: removed a stray doubled semicolon in OnVideoFrame().
class CastVideoSink : public base::SupportsWeakPtr<CastVideoSink>,
                      public content::MediaStreamVideoSink {
 public:
  // |track| is the source of video frames.
  // |expected_coded_size| is the resolution the encoder was configured
  // for; frames of any other size are rejected.
  // |error_callback| is invoked for each frame whose size mismatches.
  CastVideoSink(const blink::WebMediaStreamTrack& track,
                const gfx::Size& expected_coded_size,
                const CastRtpStream::ErrorCallback& error_callback)
      : track_(track),
        sink_added_(false),
        expected_coded_size_(expected_coded_size),
        error_callback_(error_callback) {}

  virtual ~CastVideoSink() {
    // Detach from the track only if AddToTrack() actually attached us.
    if (sink_added_)
      RemoveFromVideoTrack(this, track_);
  }

  // content::MediaStreamVideoSink implementation.
  virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& frame)
      OVERRIDE {
    // The encoder configuration is fixed at start; report a mismatched
    // resolution rather than feeding the encoder a bad frame.
    if (frame->coded_size() != expected_coded_size_) {
      error_callback_.Run("Video frame resolution does not match config.");
      return;
    }
    // Frame timestamps are relative; anchor them to the wall clock using
    // the arrival time of the first frame.
    if (first_frame_timestamp_.is_null())
      first_frame_timestamp_ = base::TimeTicks::Now() - frame->GetTimestamp();
    TRACE_EVENT_INSTANT2(
        "mirroring", "MediaStreamVideoSink::OnVideoFrame",
        TRACE_EVENT_SCOPE_THREAD,
        "timestamp",
        (first_frame_timestamp_ + frame->GetTimestamp()).ToInternalValue(),
        "time_delta", frame->GetTimestamp().ToInternalValue());
    frame_input_->InsertRawVideoFrame(
        frame, first_frame_timestamp_ + frame->GetTimestamp());
  }

  // Attaches this sink to the video track; frames are forwarded to
  // |frame_input| from then on. May only be called once.
  void AddToTrack(
      const scoped_refptr<media::cast::VideoFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;
    frame_input_ = frame_input;
    AddToVideoTrack(this, track_);
  }

 private:
  blink::WebMediaStreamTrack track_;
  scoped_refptr<media::cast::VideoFrameInput> frame_input_;
  bool sink_added_;
  gfx::Size expected_coded_size_;
  CastRtpStream::ErrorCallback error_callback_;
  // Wall-clock time corresponding to a zero frame timestamp; set lazily
  // when the first frame arrives.
  base::TimeTicks first_frame_timestamp_;

  DISALLOW_COPY_AND_ASSIGN(CastVideoSink);
};
// Receives interleaved 16-bit PCM audio from a WebMediaStreamTrack,
// optionally resamples it to the rate the Cast audio encoder expects,
// and forwards it to the sender's AudioFrameInput. AddToTrack() must be
// called before audio can be forwarded.
class CastAudioSink : public base::SupportsWeakPtr<CastAudioSink>,
                      public content::MediaStreamAudioSink {
 public:
  // |track| is the source of audio data.
  // |error_callback| reports stream errors (stored but not invoked in
  //   this class as written).
  // |output_channels| / |output_sample_rate| describe the format the
  //   Cast audio encoder was configured with.
  CastAudioSink(const blink::WebMediaStreamTrack& track,
                const CastRtpStream::ErrorCallback& error_callback,
                int output_channels,
                int output_sample_rate)
      : track_(track),
        sink_added_(false),
        error_callback_(error_callback),
        weak_factory_(this),
        input_preroll_(0),
        output_channels_(output_channels),
        output_sample_rate_(output_sample_rate) {}

  virtual ~CastAudioSink() {
    // Detach from the track only if AddToTrack() actually attached us.
    if (sink_added_)
      RemoveFromAudioTrack(this, track_);
  }

  // content::MediaStreamAudioSink implementation. |audio_data| is
  // interleaved 16-bit PCM with |number_of_channels| channels.
  virtual void OnData(const int16* audio_data,
                      int sample_rate,
                      int number_of_channels,
                      int number_of_frames) OVERRIDE {
    scoped_ptr<media::AudioBus> input_bus;
    if (resampler_) {
      // Input rate differs from the encoder rate (see OnSetFormat):
      // route the data through the FIFO + resampler.
      input_bus = ResampleData(
          audio_data, sample_rate, number_of_channels, number_of_frames);
      if (!input_bus)
        return;  // Still prerolling; nothing to deliver yet.
    } else {
      // Rates match: just deinterleave into an AudioBus.
      input_bus = media::AudioBus::Create(
          number_of_channels, number_of_frames);
      input_bus->FromInterleaved(
          audio_data, number_of_frames, number_of_channels);
    }
    frame_input_->InsertAudio(input_bus.Pass(), base::TimeTicks::Now());
  }

  // Pushes |audio_data| into the FIFO and, once kBufferAudioData buffers
  // have accumulated, pulls one resampled buffer out. Returns NULL while
  // prerolling (the first kBufferAudioData - 1 calls).
  scoped_ptr<media::AudioBus> ResampleData(
      const int16* audio_data,
      int sample_rate,
      int number_of_channels,
      int number_of_frames) {
    // Channel-count conversion is not supported, only rate conversion.
    DCHECK_EQ(number_of_channels, output_channels_);
    fifo_input_bus_->FromInterleaved(
        audio_data, number_of_frames, number_of_channels);
    fifo_->Push(fifo_input_bus_.get());
    if (input_preroll_ < kBufferAudioData - 1) {
      ++input_preroll_;
      return scoped_ptr<media::AudioBus>();
    }
    // Scale the output buffer size by the rate ratio so one input buffer
    // maps to (roughly) one output buffer.
    scoped_ptr<media::AudioBus> output_bus(
        media::AudioBus::Create(
            output_channels_,
            output_sample_rate_ * fifo_input_bus_->frames() / sample_rate));
    resampler_->Resample(output_bus->frames(), output_bus.get());
    return output_bus.Pass();
  }

  // Called when the track's audio format becomes known or changes.
  // Builds the FIFO and resampler iff the input sample rate differs from
  // the encoder's output rate; otherwise OnData() forwards data directly.
  virtual void OnSetFormat(const media::AudioParameters& params) OVERRIDE {
    if (params.sample_rate() == output_sample_rate_)
      return;
    // NOTE(review): the FIFO is sized with |output_channels_| while the
    // input bus uses params.channels(); ResampleData() DCHECKs that the
    // two are equal, so a channel-count mismatch is not supported here.
    fifo_.reset(new media::AudioFifo(
        output_channels_,
        kBufferAudioData * params.frames_per_buffer()));
    fifo_input_bus_ = media::AudioBus::Create(
        params.channels(), params.frames_per_buffer());
    // base::Unretained is safe here as long as |resampler_| (owned by
    // this object) cannot outlive |this|.
    resampler_.reset(new media::MultiChannelResampler(
        output_channels_,
        static_cast<double>(params.sample_rate()) / output_sample_rate_,
        params.frames_per_buffer(),
        base::Bind(&CastAudioSink::ProvideData, base::Unretained(this))));
  }

  // Attaches this sink to the audio track; audio is forwarded to
  // |frame_input| from then on. May only be called once.
  void AddToTrack(
      const scoped_refptr<media::cast::AudioFrameInput>& frame_input) {
    DCHECK(!sink_added_);
    sink_added_ = true;
    frame_input_ = frame_input;
    AddToAudioTrack(this, track_);
  }

  // MultiChannelResampler read callback: supplies buffered input frames.
  // |frame_delay| is ignored.
  void ProvideData(int frame_delay, media::AudioBus* output_bus) {
    fifo_->Consume(output_bus, 0, output_bus->frames());
  }

 private:
  blink::WebMediaStreamTrack track_;
  bool sink_added_;
  CastRtpStream::ErrorCallback error_callback_;
  base::WeakPtrFactory<CastAudioSink> weak_factory_;
  // Resampling state; non-NULL only when the input sample rate differs
  // from |output_sample_rate_| (see OnSetFormat).
  scoped_ptr<media::MultiChannelResampler> resampler_;
  scoped_ptr<media::AudioFifo> fifo_;
  scoped_ptr<media::AudioBus> fifo_input_bus_;
  // Number of preroll buffers consumed so far, capped at
  // kBufferAudioData - 1.
  int input_preroll_;
  const int output_channels_;
  const int output_sample_rate_;
  scoped_refptr<media::cast::AudioFrameInput> frame_input_;

  DISALLOW_COPY_AND_ASSIGN(CastAudioSink);
};
CastRtpParams::CastRtpParams(const CastRtpPayloadParams& payload_params)
    : payload(payload_params) {}

// Trivial out-of-line special members for the parameter structs.
CastCodecSpecificParams::CastCodecSpecificParams() {}

CastCodecSpecificParams::~CastCodecSpecificParams() {}

// Zero-initialize every numeric payload field; string members use their
// own default constructors.
CastRtpPayloadParams::CastRtpPayloadParams()
    : payload_type(0),
      ssrc(0),
      feedback_ssrc(0),
      clock_rate(0),
      max_bitrate(0),
      min_bitrate(0),
      channels(0),
      width(0),
      height(0) {}

CastRtpPayloadParams::~CastRtpPayloadParams() {}

CastRtpParams::CastRtpParams() {}

CastRtpParams::~CastRtpParams() {}

CastRtpStream::CastRtpStream(const blink::WebMediaStreamTrack& track,
                             const scoped_refptr<CastSession>& session)
    : track_(track), cast_session_(session), weak_factory_(this) {}

CastRtpStream::~CastRtpStream() {}
// Returns the RTP parameter sets this stream can be started with,
// depending on whether the underlying track carries audio or video.
std::vector<CastRtpParams> CastRtpStream::GetSupportedParams() {
  return IsAudio() ? SupportedAudioParams() : SupportedVideoParams();
}
// Returns the parameters this stream was started with (see Start()).
CastRtpParams CastRtpStream::GetParams() { return params_; }
// Starts the stream: converts |params| into a sender config, creates
// the matching audio/video sink, and asks the Cast session to start
// sending. |start_callback| runs on success; invalid parameters are
// reported through DidEncounterError() (which also schedules Stop()).
//
// Fix: |params_| was never stored, so GetParams() always returned a
// default-constructed CastRtpParams. Store the caller's params here.
void CastRtpStream::Start(const CastRtpParams& params,
                          const base::Closure& start_callback,
                          const base::Closure& stop_callback,
                          const ErrorCallback& error_callback) {
  params_ = params;  // Remember the active parameters for GetParams().
  stop_callback_ = stop_callback;
  error_callback_ = error_callback;
  if (IsAudio()) {
    AudioSenderConfig config;
    if (!ToAudioSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for audio.");
      return;
    }
    // BindToCurrentLoop: the sink reports errors from another thread,
    // but DidEncounterError must run on this (render) thread.
    audio_sink_.reset(new CastAudioSink(
        track_,
        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
                                            weak_factory_.GetWeakPtr())),
        params.payload.channels,
        params.payload.clock_rate));
    cast_session_->StartAudio(
        config,
        base::Bind(&CastAudioSink::AddToTrack, audio_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  } else {
    VideoSenderConfig config;
    if (!ToVideoSenderConfig(params, &config)) {
      DidEncounterError("Invalid parameters for video.");
      return;
    }
    // The sink validates that incoming frames match the configured
    // coded size; see CastVideoSink::OnVideoFrame.
    video_sink_.reset(new CastVideoSink(
        track_,
        gfx::Size(config.width, config.height),
        media::BindToCurrentLoop(base::Bind(&CastRtpStream::DidEncounterError,
                                            weak_factory_.GetWeakPtr()))));
    cast_session_->StartVideo(
        config,
        base::Bind(&CastVideoSink::AddToTrack, video_sink_->AsWeakPtr()),
        base::Bind(&CastRtpStream::DidEncounterError,
                   weak_factory_.GetWeakPtr()));
    start_callback.Run();
  }
}
// Stops the stream: destroying the sinks detaches them from the track
// (halting frame delivery), then the stop callback is invoked.
void CastRtpStream::Stop() {
  audio_sink_.reset();
  video_sink_.reset();
  stop_callback_.Run();
}
// Enables or disables event logging for this stream's Cast session.
void CastRtpStream::ToggleLogging(bool enable) {
  cast_session_->ToggleLogging(IsAudio(), enable);
}
// Fetches (and resets) this stream's raw event log; |callback| receives
// the serialized event data.
void CastRtpStream::GetRawEvents(
    const base::Callback<void(scoped_ptr<base::BinaryValue>)>& callback) {
  cast_session_->GetEventLogsAndReset(IsAudio(), callback);
}
// Fetches (and resets) this stream's statistics; |callback| receives
// them as a dictionary.
void CastRtpStream::GetStats(
    const base::Callback<void(scoped_ptr<base::DictionaryValue>)>& callback) {
  cast_session_->GetStatsAndReset(IsAudio(), callback);
}
// True when the underlying media stream track carries audio.
bool CastRtpStream::IsAudio() const {
  return track_.source().type() == blink::WebMediaStreamSource::TypeAudio;
}
// Reports |message| through the error callback and schedules Stop() on
// the render thread's message loop.
void CastRtpStream::DidEncounterError(const std::string& message) {
  // Grab the weak pointer before running the callback — presumably the
  // caller-supplied |error_callback_| may destroy |this|, in which case
  // the posted Stop() safely becomes a no-op. TODO confirm with callers.
  base::WeakPtr<CastRtpStream> ptr = weak_factory_.GetWeakPtr();
  error_callback_.Run(message);
  content::RenderThread::Get()->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&CastRtpStream::Stop, ptr));
}