This source file includes the following definitions:
- GetDeviceFilter
- PinMatchesCategory
- GetPin
- FreeMediaType
- DeleteMediaType
- get
- Free
- Receive
- TranslateMediaSubtypeToPixelFormat
- GetDeviceNames
- GetDeviceSupportedFormats
- Create
- GetDeviceNames
- GetDeviceSupportedFormats
- VideoCaptureDeviceWin (constructor)
- Init
- AllocateAndStart
- StopAndDeAllocate
- FrameReceived
- CreateCapabilityMap
- SetErrorState
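A rough usage sketch of the entry points defined in this file (enumeration, device creation, start and stop). The client object is a hypothetical scoped_ptr<VideoCaptureDevice::Client> supplied by the caller; error handling is omitted:

//   media::VideoCaptureDevice::Names names;
//   media::VideoCaptureDevice::GetDeviceNames(&names);    // enumerate cameras
//   if (!names.empty()) {
//     scoped_ptr<media::VideoCaptureDevice> device(
//         media::VideoCaptureDevice::Create(names.front()));
//     media::VideoCaptureParams params;
//     params.requested_format.frame_size.SetSize(640, 480);
//     params.requested_format.frame_rate = 30;
//     device->AllocateAndStart(params, client.Pass());    // frames arrive via the client
//     ...
//     device->StopAndDeAllocate();
//   }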
#include "media/video/capture/win/video_capture_device_win.h"
#include <algorithm>
#include <list>
#include "base/command_line.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "base/win/metro.h"
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
#include "base/win/windows_version.h"
#include "media/base/media_switches.h"
#include "media/video/capture/win/video_capture_device_mf_win.h"
using base::win::ScopedCoMem;
using base::win::ScopedComPtr;
using base::win::ScopedVariant;
namespace media {
namespace {
HRESULT GetDeviceFilter(const VideoCaptureDevice::Name& device_name,
IBaseFilter** filter) {
DCHECK(filter);
ScopedComPtr<ICreateDevEnum> dev_enum;
HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC);
if (FAILED(hr))
return hr;
ScopedComPtr<IEnumMoniker> enum_moniker;
hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE (a success code) when no devices
  // exist in the category, so FAILED() cannot be used here.
  if (hr != S_OK)
    return hr;
ScopedComPtr<IMoniker> moniker;
ScopedComPtr<IBaseFilter> capture_filter;
DWORD fetched = 0;
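  // Walk the capture device monikers and match the requested device by its
  // DevicePath, Description or FriendlyName against device_name.id().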
while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
ScopedComPtr<IPropertyBag> prop_bag;
hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
if (FAILED(hr)) {
moniker.Release();
continue;
}
static const wchar_t* kPropertyNames[] = {
L"DevicePath", L"Description", L"FriendlyName"
};
ScopedVariant name;
for (size_t i = 0;
i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
}
if (name.type() == VT_BSTR) {
std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
if (device_path.compare(device_name.id()) == 0) {
hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
capture_filter.ReceiveVoid());
DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
break;
}
}
moniker.Release();
}
*filter = capture_filter.Detach();
if (!*filter && SUCCEEDED(hr))
hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);
return hr;
}
bool PinMatchesCategory(IPin* pin, REFGUID category) {
DCHECK(pin);
bool found = false;
ScopedComPtr<IKsPropertySet> ks_property;
HRESULT hr = ks_property.QueryFrom(pin);
if (SUCCEEDED(hr)) {
GUID pin_category;
DWORD return_value;
hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
&pin_category, sizeof(pin_category), &return_value);
if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
found = (pin_category == category);
}
}
return found;
}
ScopedComPtr<IPin> GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir,
REFGUID category) {
ScopedComPtr<IPin> pin;
  ScopedComPtr<IEnumPins> pin_enum;
  HRESULT hr = filter->EnumPins(pin_enum.Receive());
  if (pin_enum == NULL)
    return pin;
  hr = pin_enum->Reset();
  while ((hr = pin_enum->Next(1, pin.Receive(), NULL)) == S_OK) {
PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
hr = pin->QueryDirection(&this_pin_dir);
if (pin_dir == this_pin_dir) {
if (category == GUID_NULL || PinMatchesCategory(pin, category))
return pin;
}
pin.Release();
}
DCHECK(!pin);
return pin;
}
void FreeMediaType(AM_MEDIA_TYPE* mt) {
if (mt->cbFormat != 0) {
CoTaskMemFree(mt->pbFormat);
mt->cbFormat = 0;
mt->pbFormat = NULL;
}
if (mt->pUnk != NULL) {
NOTREACHED();
mt->pUnk->Release();
mt->pUnk = NULL;
}
}
void DeleteMediaType(AM_MEDIA_TYPE* mt) {
if (mt != NULL) {
FreeMediaType(mt);
CoTaskMemFree(mt);
}
}
class ScopedMediaType {
public:
ScopedMediaType() : media_type_(NULL) {}
~ScopedMediaType() { Free(); }
AM_MEDIA_TYPE* operator->() { return media_type_; }
AM_MEDIA_TYPE* get() { return media_type_; }
void Free() {
if (!media_type_)
return;
DeleteMediaType(media_type_);
    media_type_ = NULL;
}
AM_MEDIA_TYPE** Receive() {
DCHECK(!media_type_);
return &media_type_;
}
private:
AM_MEDIA_TYPE* media_type_;
};
VideoPixelFormat TranslateMediaSubtypeToPixelFormat(const GUID& sub_type) {
static struct {
const GUID& sub_type;
VideoPixelFormat format;
} pixel_formats[] = {
{ kMediaSubTypeI420, PIXEL_FORMAT_I420 },
{ MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 },
{ MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 },
{ MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 },
{ MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG },
{ MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY },
{ MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB },
};
for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) {
if (sub_type == pixel_formats[i].sub_type)
return pixel_formats[i].format;
}
#ifndef NDEBUG
WCHAR guid_str[128];
StringFromGUID2(sub_type, guid_str, arraysize(guid_str));
DVLOG(2) << "Device (also) supports an unknown media type " << guid_str;
#endif
return PIXEL_FORMAT_UNKNOWN;
}
}  // namespace
void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
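  // Use the Media Foundation backend in Metro processes (unless DirectShow
  // capture is forced) or when Media Foundation capture is explicitly forced
  // on Windows 7 and later; otherwise fall back to DirectShow.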
if ((base::win::IsMetroProcess() &&
!cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
(base::win::GetVersion() >= base::win::VERSION_WIN7 &&
cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) {
VideoCaptureDeviceMFWin::GetDeviceNames(device_names);
} else {
VideoCaptureDeviceWin::GetDeviceNames(device_names);
}
}
void VideoCaptureDevice::GetDeviceSupportedFormats(const Name& device,
VideoCaptureFormats* formats) {
const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
if ((base::win::IsMetroProcess() &&
!cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
(base::win::GetVersion() >= base::win::VERSION_WIN7 &&
cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) {
VideoCaptureDeviceMFWin::GetDeviceSupportedFormats(device, formats);
} else {
VideoCaptureDeviceWin::GetDeviceSupportedFormats(device, formats);
}
}
VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
VideoCaptureDevice* ret = NULL;
if (device_name.capture_api_type() == Name::MEDIA_FOUNDATION) {
DCHECK(VideoCaptureDeviceMFWin::PlatformSupported());
scoped_ptr<VideoCaptureDeviceMFWin> device(
new VideoCaptureDeviceMFWin(device_name));
DVLOG(1) << " MediaFoundation Device: " << device_name.name();
if (device->Init())
ret = device.release();
} else if (device_name.capture_api_type() == Name::DIRECT_SHOW) {
scoped_ptr<VideoCaptureDeviceWin> device(
new VideoCaptureDeviceWin(device_name));
DVLOG(1) << " DirectShow Device: " << device_name.name();
if (device->Init())
ret = device.release();
  } else {
NOTREACHED() << " Couldn't recognize VideoCaptureDevice type";
}
return ret;
}
void VideoCaptureDeviceWin::GetDeviceNames(Names* device_names) {
DCHECK(device_names);
ScopedComPtr<ICreateDevEnum> dev_enum;
HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC);
if (FAILED(hr))
return;
ScopedComPtr<IEnumMoniker> enum_moniker;
hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
enum_moniker.Receive(), 0);
if (hr != S_OK)
return;
device_names->clear();
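  // "google camera adapter" is a virtual DirectShow filter (installed with
  // GTalk) rather than a physical camera; devices matching this name are
  // filtered out below.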
static const char kGoogleCameraAdapter[] = "google camera adapter";
ScopedComPtr<IMoniker> moniker;
int index = 0;
while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
ScopedComPtr<IPropertyBag> prop_bag;
hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
if (FAILED(hr)) {
moniker.Release();
continue;
}
ScopedVariant name;
hr = prop_bag->Read(L"Description", name.Receive(), 0);
if (FAILED(hr))
hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);
if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
const wchar_t* str_ptr = V_BSTR(&name);
const int name_length = arraysize(kGoogleCameraAdapter) - 1;
      if ((wcsstr(str_ptr, L"(VFW)") == NULL) &&
          (lstrlenW(str_ptr) < name_length ||
           !LowerCaseEqualsASCII(str_ptr, str_ptr + name_length,
                                 kGoogleCameraAdapter))) {
std::string id;
std::string device_name(base::SysWideToUTF8(str_ptr));
name.Reset();
hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
if (FAILED(hr) || name.type() != VT_BSTR) {
id = device_name;
} else {
DCHECK_EQ(name.type(), VT_BSTR);
id = base::SysWideToUTF8(V_BSTR(&name));
}
device_names->push_back(Name(device_name, id, Name::DIRECT_SHOW));
}
}
moniker.Release();
}
}
void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device,
VideoCaptureFormats* formats) {
DVLOG(1) << "GetDeviceSupportedFormats for " << device.name();
ScopedComPtr<ICreateDevEnum> dev_enum;
HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC);
if (FAILED(hr))
return;
ScopedComPtr<IEnumMoniker> enum_moniker;
hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
enum_moniker.Receive(), 0);
if (hr != S_OK)
return;
ScopedComPtr<IMoniker> moniker;
int index = 0;
ScopedVariant device_id;
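  // Walk the video input device monikers until one whose DevicePath matches
  // the requested device id is found.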
while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
ScopedComPtr<IPropertyBag> prop_bag;
hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
if (FAILED(hr)) {
moniker.Release();
continue;
}
device_id.Reset();
hr = prop_bag->Read(L"DevicePath", device_id.Receive(), 0);
if (FAILED(hr)) {
DVLOG(1) << "Couldn't read a device's DevicePath.";
return;
}
if (device.id() == base::SysWideToUTF8(V_BSTR(&device_id)))
break;
moniker.Release();
}
if (moniker.get()) {
base::win::ScopedComPtr<IBaseFilter> capture_filter;
hr = GetDeviceFilter(device, capture_filter.Receive());
if (!capture_filter) {
DVLOG(2) << "Failed to create capture filter.";
return;
}
base::win::ScopedComPtr<IPin> output_capture_pin(
GetPin(capture_filter, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE));
if (!output_capture_pin) {
DVLOG(2) << "Failed to get capture output pin";
return;
}
ScopedComPtr<IAMStreamConfig> stream_config;
hr = output_capture_pin.QueryInterface(stream_config.Receive());
if (FAILED(hr)) {
DVLOG(2) << "Failed to get IAMStreamConfig interface from "
"capture device";
return;
}
int count = 0, size = 0;
hr = stream_config->GetNumberOfCapabilities(&count, &size);
if (FAILED(hr)) {
DVLOG(2) << "Failed to GetNumberOfCapabilities";
return;
}
scoped_ptr<BYTE[]> caps(new BYTE[size]);
for (int i = 0; i < count; ++i) {
ScopedMediaType media_type;
hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
if (hr != S_OK) {
DVLOG(2) << "Failed to GetStreamCaps";
return;
}
if (media_type->majortype == MEDIATYPE_Video &&
media_type->formattype == FORMAT_VideoInfo) {
VIDEOINFOHEADER* h =
reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
VideoCaptureFormat format;
format.frame_size.SetSize(h->bmiHeader.biWidth,
h->bmiHeader.biHeight);
format.frame_rate = (h->AvgTimePerFrame > 0) ?
static_cast<int>(kSecondsToReferenceTime / h->AvgTimePerFrame) :
0;
format.pixel_format =
TranslateMediaSubtypeToPixelFormat(media_type->subtype);
formats->push_back(format);
DVLOG(1) << device.name() << " resolution: "
<< format.frame_size.ToString() << ", fps: " << format.frame_rate
<< ", pixel format: " << format.pixel_format;
}
}
}
}
VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
: device_name_(device_name),
state_(kIdle) {
DetachFromThread();
}
VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
DCHECK(CalledOnValidThread());
if (media_control_)
media_control_->Stop();
if (graph_builder_) {
if (sink_filter_) {
graph_builder_->RemoveFilter(sink_filter_);
sink_filter_ = NULL;
}
if (capture_filter_)
graph_builder_->RemoveFilter(capture_filter_);
if (mjpg_filter_)
graph_builder_->RemoveFilter(mjpg_filter_);
}
}
bool VideoCaptureDeviceWin::Init() {
DCHECK(CalledOnValidThread());
HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
if (!capture_filter_) {
DVLOG(2) << "Failed to create capture filter.";
return false;
}
output_capture_pin_ =
GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE);
if (!output_capture_pin_) {
DVLOG(2) << "Failed to get capture output pin";
return false;
}
sink_filter_ = new SinkFilter(this);
if (sink_filter_ == NULL) {
DVLOG(2) << "Failed to create send filter";
return false;
}
input_sink_pin_ = sink_filter_->GetPin(0);
hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
CLSCTX_INPROC_SERVER);
if (FAILED(hr)) {
DVLOG(2) << "Failed to create graph builder.";
return false;
}
hr = graph_builder_.QueryInterface(media_control_.Receive());
if (FAILED(hr)) {
DVLOG(2) << "Failed to create media control builder.";
return false;
}
hr = graph_builder_->AddFilter(capture_filter_, NULL);
if (FAILED(hr)) {
DVLOG(2) << "Failed to add the capture device to the graph.";
return false;
}
hr = graph_builder_->AddFilter(sink_filter_, NULL);
if (FAILED(hr)) {
DVLOG(2)<< "Failed to add the send filter to the graph.";
return false;
}
return CreateCapabilityMap();
}
void VideoCaptureDeviceWin::AllocateAndStart(
const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
DCHECK(CalledOnValidThread());
if (state_ != kIdle)
return;
client_ = client.Pass();
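  // Pick the supported capability that best matches the requested resolution
  // and frame rate; the frame rate is capped at the requested value below.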
const VideoCaptureCapabilityWin& found_capability =
capabilities_.GetBestMatchedFormat(
params.requested_format.frame_size.width(),
params.requested_format.frame_size.height(),
params.requested_format.frame_rate);
VideoCaptureFormat format = found_capability.supported_format;
if (format.frame_rate > params.requested_format.frame_rate)
format.frame_rate = params.requested_format.frame_rate;
ScopedComPtr<IAMStreamConfig> stream_config;
HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
if (FAILED(hr)) {
SetErrorState("Can't get the Capture format settings");
return;
}
int count = 0, size = 0;
hr = stream_config->GetNumberOfCapabilities(&count, &size);
if (FAILED(hr)) {
DVLOG(2) << "Failed to GetNumberOfCapabilities";
return;
}
scoped_ptr<BYTE[]> caps(new BYTE[size]);
ScopedMediaType media_type;
hr = stream_config->GetStreamCaps(
found_capability.stream_index, media_type.Receive(), caps.get());
if (SUCCEEDED(hr)) {
if (media_type->formattype == FORMAT_VideoInfo) {
VIDEOINFOHEADER* h =
reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
if (format.frame_rate > 0)
h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate;
}
sink_filter_->SetRequestedMediaFormat(format);
hr = stream_config->SetFormat(media_type.get());
}
if (FAILED(hr))
SetErrorState("Failed to set capture device output format");
if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) {
hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);
if (SUCCEEDED(hr)) {
input_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL);
output_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL);
hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
}
if (FAILED(hr)) {
mjpg_filter_.Release();
input_mjpg_pin_.Release();
output_mjpg_pin_.Release();
}
}
if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) {
hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
NULL);
    if (SUCCEEDED(hr)) {
      hr = graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
                                         NULL);
    }
} else {
hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
NULL);
}
if (FAILED(hr)) {
SetErrorState("Failed to connect the Capture graph.");
return;
}
hr = media_control_->Pause();
if (FAILED(hr)) {
SetErrorState("Failed to Pause the Capture device. "
"Is it already occupied?");
return;
}
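  // Get the format back from the sink filter; graph negotiation may have
  // produced a different format than the one requested.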
capture_format_ = sink_filter_->ResultingFormat();
hr = media_control_->Run();
if (FAILED(hr)) {
SetErrorState("Failed to start the Capture device.");
return;
}
state_ = kCapturing;
}
void VideoCaptureDeviceWin::StopAndDeAllocate() {
DCHECK(CalledOnValidThread());
if (state_ != kCapturing)
return;
HRESULT hr = media_control_->Stop();
if (FAILED(hr)) {
SetErrorState("Failed to stop the capture graph.");
return;
}
graph_builder_->Disconnect(output_capture_pin_);
graph_builder_->Disconnect(input_sink_pin_);
if (mjpg_filter_) {
graph_builder_->Disconnect(input_mjpg_pin_);
graph_builder_->Disconnect(output_mjpg_pin_);
}
if (FAILED(hr)) {
SetErrorState("Failed to Stop the Capture device");
return;
}
client_.reset();
state_ = kIdle;
}
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
int length) {
client_->OnIncomingCapturedData(
buffer, length, capture_format_, 0, base::TimeTicks::Now());
}
bool VideoCaptureDeviceWin::CreateCapabilityMap() {
DCHECK(CalledOnValidThread());
ScopedComPtr<IAMStreamConfig> stream_config;
HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
if (FAILED(hr)) {
DVLOG(2) << "Failed to get IAMStreamConfig interface from "
"capture device";
return false;
}
ScopedComPtr<IAMVideoControl> video_control;
hr = capture_filter_.QueryInterface(video_control.Receive());
DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";
int count = 0, size = 0;
hr = stream_config->GetNumberOfCapabilities(&count, &size);
if (FAILED(hr)) {
DVLOG(2) << "Failed to GetNumberOfCapabilities";
return false;
}
scoped_ptr<BYTE[]> caps(new BYTE[size]);
for (int i = 0; i < count; ++i) {
ScopedMediaType media_type;
hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get());
if (hr != S_OK) {
DVLOG(2) << "Failed to GetStreamCaps";
return false;
}
if (media_type->majortype == MEDIATYPE_Video &&
media_type->formattype == FORMAT_VideoInfo) {
VideoCaptureCapabilityWin capability(i);
VIDEOINFOHEADER* h =
reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth,
h->bmiHeader.biHeight);
REFERENCE_TIME time_per_frame = h->AvgTimePerFrame;
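      // If IAMVideoControl is available, query the frame rate list for this
      // resolution and use the smallest time per frame (i.e. the highest
      // supported frame rate) instead of the default AvgTimePerFrame.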
if (video_control) {
ScopedCoMem<LONGLONG> max_fps;
LONG list_size = 0;
SIZE size = {capability.supported_format.frame_size.width(),
capability.supported_format.frame_size.height()};
hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
&list_size, &max_fps);
if (hr == S_OK && list_size > 0 && max_fps) {
time_per_frame = *std::min_element(max_fps.get(),
max_fps.get() + list_size);
}
}
capability.supported_format.frame_rate =
(time_per_frame > 0)
? static_cast<int>(kSecondsToReferenceTime / time_per_frame)
: 0;
capability.frame_rate_numerator = capability.supported_format.frame_rate;
capability.frame_rate_denominator = 1;
capability.supported_format.pixel_format =
TranslateMediaSubtypeToPixelFormat(media_type->subtype);
capabilities_.Add(capability);
}
}
return !capabilities_.empty();
}
void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) {
DCHECK(CalledOnValidThread());
DVLOG(1) << reason;
state_ = kError;
client_->OnError(reason);
}
}  // namespace media