This source file includes the following definitions.
- VideoScheduler
- CreateSharedMemory
- OnCaptureCompleted
- OnCursorShapeChanged
- Start
- Stop
- Pause
- UpdateSequenceNumber
- StartOnCaptureThread
- StopOnCaptureThread
- ScheduleNextCapture
- CaptureNextFrame
- FrameCaptureCompleted
- SendVideoPacket
- VideoFrameSentCallback
- SendCursorShape
- EncodeFrame
#include "remoting/host/video_scheduler.h"
#include <algorithm>
#include "base/bind.h"
#include "base/callback.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/stl_util.h"
#include "base/sys_info.h"
#include "base/time/time.h"
#include "remoting/proto/control.pb.h"
#include "remoting/proto/internal.pb.h"
#include "remoting/proto/video.pb.h"
#include "remoting/protocol/cursor_shape_stub.h"
#include "remoting/protocol/message_decoder.h"
#include "remoting/protocol/util.h"
#include "remoting/protocol/video_stub.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h"
#include "third_party/webrtc/modules/desktop_capture/screen_capturer.h"
namespace remoting {
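
// Maximum number of captured frames that may be in the encode/send pipeline
// at any one time. Captures are skipped while this limit is reached.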
static const int kMaxPendingFrames = 2;
VideoScheduler::VideoScheduler(
scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> encode_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> network_task_runner,
scoped_ptr<webrtc::ScreenCapturer> capturer,
scoped_ptr<VideoEncoder> encoder,
protocol::CursorShapeStub* cursor_stub,
protocol::VideoStub* video_stub)
: capture_task_runner_(capture_task_runner),
encode_task_runner_(encode_task_runner),
network_task_runner_(network_task_runner),
capturer_(capturer.Pass()),
encoder_(encoder.Pass()),
cursor_stub_(cursor_stub),
video_stub_(video_stub),
pending_frames_(0),
capture_pending_(false),
did_skip_frame_(false),
is_paused_(false),
sequence_number_(0) {
DCHECK(network_task_runner_->BelongsToCurrentThread());
DCHECK(capturer_);
DCHECK(encoder_);
DCHECK(cursor_stub_);
DCHECK(video_stub_);
}
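
// Minimal usage sketch (assumed caller code, not part of this file). The
// scheduler is created and started from the network thread, e.g.:
//
//   scoped_refptr<VideoScheduler> scheduler = new VideoScheduler(
//       capture_task_runner, encode_task_runner, network_task_runner,
//       capturer.Pass(), encoder.Pass(), &cursor_shape_stub, &video_stub);
//   scheduler->Start();
//   ...
//   scheduler->Stop();
//
// This assumes VideoScheduler is reference-counted, as implied by the
// base::Bind(..., this, ...) usage throughout this file.

// Capturer callback. Shared memory is not used for captured frames, so
// returning NULL here makes the capturer allocate frames on the heap.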
webrtc::SharedMemory* VideoScheduler::CreateSharedMemory(size_t size) {
return NULL;
}
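
// Called on the capture thread when the capturer completes a frame. |frame|
// may be NULL if the capture failed. Ownership of the frame is taken here and
// it is posted to the encode thread together with the most recent client
// sequence number. If a capture was skipped while this one was pending,
// another capture is requested immediately.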
void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
capture_pending_ = false;
scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);
if (frame) {
scheduler_.RecordCaptureTime(
base::TimeDelta::FromMilliseconds(frame->capture_time_ms()));
}
encode_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::EncodeFrame, this,
base::Passed(&owned_frame), sequence_number_));
if (did_skip_frame_) {
capture_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::CaptureNextFrame, this));
}
}
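
// Called on the capture thread when the mouse cursor shape changes. The shape
// is converted to a protocol::CursorShapeInfo and forwarded to the network
// thread, unless the scheduler is already being stopped (|capturer_| has been
// released).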
void VideoScheduler::OnCursorShapeChanged(
webrtc::MouseCursorShape* cursor_shape) {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
scoped_ptr<webrtc::MouseCursorShape> owned_cursor(cursor_shape);
if (!capturer_)
return;
scoped_ptr<protocol::CursorShapeInfo> cursor_proto(
new protocol::CursorShapeInfo());
cursor_proto->set_width(cursor_shape->size.width());
cursor_proto->set_height(cursor_shape->size.height());
cursor_proto->set_hotspot_x(cursor_shape->hotspot.x());
cursor_proto->set_hotspot_y(cursor_shape->hotspot.y());
cursor_proto->set_data(cursor_shape->data);
network_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::SendCursorShape, this,
base::Passed(&cursor_proto)));
}
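
// Starts the pipeline by initializing the capturer and the capture timer on
// the capture thread.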
void VideoScheduler::Start() {
DCHECK(network_task_runner_->BelongsToCurrentThread());
capture_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::StartOnCaptureThread, this));
}
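
// Stops the scheduler. The stubs are cleared immediately so that no further
// packets or cursor shapes reach the client, then the capturer and timer are
// torn down on the capture thread.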
void VideoScheduler::Stop() {
DCHECK(network_task_runner_->BelongsToCurrentThread());
cursor_stub_ = NULL;
video_stub_ = NULL;
capture_task_runner_->PostTask(FROM_HERE,
base::Bind(&VideoScheduler::StopOnCaptureThread, this));
}
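
// Pauses or resumes capturing. Can be called from the network thread; the
// call is re-posted to the capture thread. Resuming triggers an immediate
// capture if none is currently scheduled.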
void VideoScheduler::Pause(bool pause) {
if (!capture_task_runner_->BelongsToCurrentThread()) {
DCHECK(network_task_runner_->BelongsToCurrentThread());
capture_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::Pause, this, pause));
return;
}
if (is_paused_ != pause) {
is_paused_ = pause;
if (!is_paused_ && capture_timer_ && !capture_timer_->IsRunning())
CaptureNextFrame();
}
}
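
// Records the most recent sequence number reported by the client so that it
// can be echoed back in outgoing video packets. Re-posted to the capture
// thread if necessary.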
void VideoScheduler::UpdateSequenceNumber(int64 sequence_number) {
if (!capture_task_runner_->BelongsToCurrentThread()) {
DCHECK(network_task_runner_->BelongsToCurrentThread());
capture_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::UpdateSequenceNumber,
this, sequence_number));
return;
}
sequence_number_ = sequence_number;
}
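
// The capturer and capture timer are released in StopOnCaptureThread(), so no
// explicit cleanup is needed here.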
VideoScheduler::~VideoScheduler() {
}
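
// Capture-thread half of Start(): registers |this| as the capturer's cursor
// shape observer and capture callback, creates the capture timer, and kicks
// off the first capture.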
void VideoScheduler::StartOnCaptureThread() {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
DCHECK(!capture_timer_);
capturer_->SetMouseShapeObserver(this);
capturer_->Start(this);
capture_timer_.reset(new base::OneShotTimer<VideoScheduler>());
CaptureNextFrame();
}
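
// Capture-thread half of Stop(): releases the capturer and cancels any
// pending capture timer.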
void VideoScheduler::StopOnCaptureThread() {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
capturer_.reset();
capture_timer_.reset();
}
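
// Arms the one-shot timer so that CaptureNextFrame() runs again after the
// delay suggested by the capture scheduler.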
void VideoScheduler::ScheduleNextCapture() {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
capture_timer_->Start(FROM_HERE,
scheduler_.NextCaptureDelay(),
this,
&VideoScheduler::CaptureNextFrame);
}
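
// Captures a single frame, unless the scheduler is paused or being stopped,
// or the pipeline is already saturated (a capture is pending or
// kMaxPendingFrames frames are still in flight), in which case the frame is
// skipped and retried once the pipeline drains.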
void VideoScheduler::CaptureNextFrame() {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
if (!capturer_ || is_paused_)
return;
if (pending_frames_ >= kMaxPendingFrames || capture_pending_) {
did_skip_frame_ = true;
return;
}
did_skip_frame_ = false;
pending_frames_++;
DCHECK_LE(pending_frames_, kMaxPendingFrames);
ScheduleNextCapture();
capture_pending_ = true;
capturer_->Capture(webrtc::DesktopRegion());
}
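
// Runs on the capture thread once a frame has been sent to the client. Frees
// a slot in the pipeline and retries a capture if one was skipped.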
void VideoScheduler::FrameCaptureCompleted() {
DCHECK(capture_task_runner_->BelongsToCurrentThread());
pending_frames_--;
DCHECK_GE(pending_frames_, 0);
if (did_skip_frame_)
CaptureNextFrame();
}
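
// Network thread: hands an encoded packet to the video stub, unless Stop()
// has already cleared it.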
void VideoScheduler::SendVideoPacket(scoped_ptr<VideoPacket> packet) {
DCHECK(network_task_runner_->BelongsToCurrentThread());
if (!video_stub_)
return;
video_stub_->ProcessVideoPacket(
packet.Pass(), base::Bind(&VideoScheduler::VideoFrameSentCallback, this));
}
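
// Completion callback for ProcessVideoPacket(). Notifies the capture thread
// that the frame has left the pipeline.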
void VideoScheduler::VideoFrameSentCallback() {
DCHECK(network_task_runner_->BelongsToCurrentThread());
if (!video_stub_)
return;
capture_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::FrameCaptureCompleted, this));
}
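
// Network thread: forwards a cursor shape to the client, unless Stop() has
// already cleared the cursor stub.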
void VideoScheduler::SendCursorShape(
scoped_ptr<protocol::CursorShapeInfo> cursor_shape) {
DCHECK(network_task_runner_->BelongsToCurrentThread());
if (!cursor_stub_)
return;
cursor_stub_->SetCursorShape(*cursor_shape);
}
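
// Encode thread: encodes |frame| into a VideoPacket and posts it to the
// network thread. A NULL or unchanged frame still produces an empty packet
// carrying the client sequence number, and that frame is handed back to the
// capture thread for deletion.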
void VideoScheduler::EncodeFrame(
scoped_ptr<webrtc::DesktopFrame> frame,
int64 sequence_number) {
DCHECK(encode_task_runner_->BelongsToCurrentThread());
if (!frame || frame->updated_region().is_empty()) {
scoped_ptr<VideoPacket> packet(new VideoPacket());
packet->set_client_sequence_number(sequence_number);
network_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::SendVideoPacket, this,
base::Passed(&packet)));
capture_task_runner_->DeleteSoon(FROM_HERE, frame.release());
return;
}
scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame);
packet->set_client_sequence_number(sequence_number);
frame.reset();
scheduler_.RecordEncodeTime(
base::TimeDelta::FromMilliseconds(packet->encode_time_ms()));
network_task_runner_->PostTask(
FROM_HERE, base::Bind(&VideoScheduler::SendVideoPacket, this,
base::Passed(&packet)));
}
}  // namespace remoting