This source file includes the following definitions.
- V4l2ColorToVideoCaptureColorFormat
- GetListOfUsableFourCCs
- HasUsableFormats
- GetDeviceNames
- GetDeviceSupportedFormats
- ReadIdFile
- GetModel
- Create
- VideoCaptureDeviceLinux
- AllocateAndStart
- StopAndDeAllocate
- OnAllocateAndStart
- OnStopAndDeAllocate
- OnCaptureTask
- AllocateVideoBuffers
- DeAllocateVideoBuffers
- SetErrorState
#include "media/video/capture/linux/video_capture_device_linux.h"
#include <errno.h>
#include <fcntl.h>
#if defined(OS_OPENBSD)
#include <sys/videoio.h>
#else
#include <linux/videodev2.h>
#endif
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <algorithm>
#include <list>
#include <string>
#include "base/bind.h"
#include "base/files/file_enumerator.h"
#include "base/files/scoped_file.h"
#include "base/posix/eintr_wrapper.h"
#include "base/strings/stringprintf.h"
namespace media {
// Max number of video buffers VideoCaptureDeviceLinux can allocate.
enum { kMaxVideoBuffers = 2 };
// Timeout in microseconds v4l2_thread_ blocks waiting for a frame.
enum { kCaptureTimeoutUs = 200000 };
// The number of continuous timeouts tolerated before treated as an error.
enum { kContinuousTimeoutLimit = 10 };
// Delay before OnCaptureTask is rescheduled when select() is interrupted.
enum { kCaptureSelectWaitMs = 10 };
// MJPEG is preferred if the requested width or height is larger than this.
enum { kMjpegWidth = 640 };
enum { kMjpegHeight = 480 };
// Typical framerate, in fps.
enum { kTypicalFramerate = 30 };
static const int32 kV4l2RawFmts[] = {
V4L2_PIX_FMT_YUV420,
V4L2_PIX_FMT_YUYV
};
static const size_t kVidPidSize = 4;
static const char kVidPathTemplate[] =
"/sys/class/video4linux/%s/device/../idVendor";
static const char kPidPathTemplate[] =
"/sys/class/video4linux/%s/device/../idProduct";
static VideoPixelFormat V4l2ColorToVideoCaptureColorFormat(
int32 v4l2_fourcc) {
VideoPixelFormat result = PIXEL_FORMAT_UNKNOWN;
switch (v4l2_fourcc) {
case V4L2_PIX_FMT_YUV420:
result = PIXEL_FORMAT_I420;
break;
case V4L2_PIX_FMT_YUYV:
result = PIXEL_FORMAT_YUY2;
break;
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG:
result = PIXEL_FORMAT_MJPEG;
break;
default:
DVLOG(1) << "Unsupported pixel format " << std::hex << v4l2_fourcc;
}
return result;
}
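// Fills |fourccs| with the capture formats this class can consume, raw
// formats first; MJPEG is moved to the front when |favour_mjpeg| is set.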
static void GetListOfUsableFourCCs(bool favour_mjpeg, std::list<int>* fourccs) {
for (size_t i = 0; i < arraysize(kV4l2RawFmts); ++i)
fourccs->push_back(kV4l2RawFmts[i]);
if (favour_mjpeg)
fourccs->push_front(V4L2_PIX_FMT_MJPEG);
else
fourccs->push_back(V4L2_PIX_FMT_MJPEG);
fourccs->push_back(V4L2_PIX_FMT_JPEG);
}
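// Returns true if the device behind |fd| enumerates at least one pixel
// format that this class considers usable.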
static bool HasUsableFormats(int fd) {
v4l2_fmtdesc fmtdesc;
std::list<int> usable_fourccs;
GetListOfUsableFourCCs(false, &usable_fourccs);
memset(&fmtdesc, 0, sizeof(v4l2_fmtdesc));
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (HANDLE_EINTR(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0) {
if (std::find(usable_fourccs.begin(), usable_fourccs.end(),
fmtdesc.pixelformat) != usable_fourccs.end())
return true;
fmtdesc.index++;
}
return false;
}
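// Enumerates /dev/video* nodes and reports those that answer VIDIOC_QUERYCAP
// as video capture devices with at least one usable format.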
void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
DCHECK(device_names->empty());
base::FilePath path("/dev/");
base::FileEnumerator enumerator(
path, false, base::FileEnumerator::FILES, "video*");
while (!enumerator.Next().empty()) {
base::FileEnumerator::FileInfo info = enumerator.GetInfo();
std::string unique_id = path.value() + info.GetName().value();
base::ScopedFD fd(HANDLE_EINTR(open(unique_id.c_str(), O_RDONLY)));
if (!fd.is_valid()) {
continue;
}
v4l2_capability cap;
if ((HANDLE_EINTR(ioctl(fd.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
!(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) {
if (HasUsableFormats(fd.get())) {
Name device_name(base::StringPrintf("%s", cap.card), unique_id);
device_names->push_back(device_name);
} else {
DVLOG(1) << "No usable formats reported by " << info.GetName().value();
}
}
}
}
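// Queries the device for its supported pixel formats, discrete frame sizes
// and frame intervals, converting each combination into a VideoCaptureFormat.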
void VideoCaptureDevice::GetDeviceSupportedFormats(
const Name& device,
VideoCaptureFormats* supported_formats) {
if (device.id().empty())
return;
base::ScopedFD fd(HANDLE_EINTR(open(device.id().c_str(), O_RDONLY)));
if (!fd.is_valid()) {
return;
}
supported_formats->clear();
v4l2_fmtdesc pixel_format = {};
pixel_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (HANDLE_EINTR(ioctl(fd.get(), VIDIOC_ENUM_FMT, &pixel_format)) == 0) {
VideoCaptureFormat supported_format;
    supported_format.pixel_format = V4l2ColorToVideoCaptureColorFormat(
        static_cast<int32>(pixel_format.pixelformat));
if (supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN) {
++pixel_format.index;
continue;
}
v4l2_frmsizeenum frame_size = {};
frame_size.pixel_format = pixel_format.pixelformat;
while (HANDLE_EINTR(ioctl(fd.get(), VIDIOC_ENUM_FRAMESIZES, &frame_size)) ==
0) {
if (frame_size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
supported_format.frame_size.SetSize(
frame_size.discrete.width, frame_size.discrete.height);
} else if (frame_size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
NOTIMPLEMENTED();
} else if (frame_size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
NOTIMPLEMENTED();
}
v4l2_frmivalenum frame_interval = {};
frame_interval.pixel_format = pixel_format.pixelformat;
frame_interval.width = frame_size.discrete.width;
frame_interval.height = frame_size.discrete.height;
while (HANDLE_EINTR(ioctl(
fd.get(), VIDIOC_ENUM_FRAMEINTERVALS, &frame_interval)) == 0) {
if (frame_interval.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
if (frame_interval.discrete.numerator != 0) {
supported_format.frame_rate =
static_cast<float>(frame_interval.discrete.denominator) /
static_cast<float>(frame_interval.discrete.numerator);
} else {
supported_format.frame_rate = 0;
}
} else if (frame_interval.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
NOTIMPLEMENTED();
break;
} else if (frame_interval.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
NOTIMPLEMENTED();
break;
}
supported_formats->push_back(supported_format);
++frame_interval.index;
}
++frame_size.index;
}
++pixel_format.index;
}
}
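// Reads a 4-character USB vendor or product id from the sysfs file at |path|
// and appends it to |id|.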
static bool ReadIdFile(const std::string& path, std::string* id) {
char id_buf[kVidPidSize];
FILE* file = fopen(path.c_str(), "rb");
if (!file)
return false;
const bool success = fread(id_buf, kVidPidSize, 1, file) == 1;
fclose(file);
if (!success)
return false;
id->append(id_buf, kVidPidSize);
return true;
}
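// Builds the "vid:pid" model identifier by reading the idVendor and idProduct
// sysfs entries associated with this /dev/video* node.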
const std::string VideoCaptureDevice::Name::GetModel() const {
const std::string dev_dir = "/dev/";
DCHECK_EQ(0, unique_id_.compare(0, dev_dir.length(), dev_dir));
  const std::string file_name = unique_id_.substr(dev_dir.length());
  const std::string vid_path =
      base::StringPrintf(kVidPathTemplate, file_name.c_str());
  const std::string pid_path =
      base::StringPrintf(kPidPathTemplate, file_name.c_str());
  std::string usb_id;
  if (!ReadIdFile(vid_path, &usb_id))
    return "";
  usb_id.append(":");
  if (!ReadIdFile(pid_path, &usb_id))
    return "";
return usb_id;
}
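// Verifies the device node can be opened before returning a new
// VideoCaptureDeviceLinux instance.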
VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
VideoCaptureDeviceLinux* self = new VideoCaptureDeviceLinux(device_name);
if (!self)
return NULL;
base::ScopedFD fd(HANDLE_EINTR(open(device_name.id().c_str(), O_RDONLY)));
if (!fd.is_valid()) {
DVLOG(1) << "Cannot open device";
delete self;
return NULL;
}
return self;
}
VideoCaptureDeviceLinux::VideoCaptureDeviceLinux(const Name& device_name)
: state_(kIdle),
device_name_(device_name),
v4l2_thread_("V4L2Thread"),
buffer_pool_(NULL),
buffer_pool_size_(0),
timeout_count_(0) {}
VideoCaptureDeviceLinux::~VideoCaptureDeviceLinux() {
state_ = kIdle;
DCHECK(!v4l2_thread_.IsRunning());
v4l2_thread_.Stop();
}
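// Starts the V4L2 thread and posts the allocation and capture start to it.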
void VideoCaptureDeviceLinux::AllocateAndStart(
const VideoCaptureParams& params,
scoped_ptr<VideoCaptureDevice::Client> client) {
if (v4l2_thread_.IsRunning()) {
return;
}
v4l2_thread_.Start();
v4l2_thread_.message_loop()->PostTask(
FROM_HERE,
base::Bind(&VideoCaptureDeviceLinux::OnAllocateAndStart,
base::Unretained(this),
params.requested_format.frame_size.width(),
params.requested_format.frame_size.height(),
params.requested_format.frame_rate,
base::Passed(&client)));
}
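// Posts the stop and deallocation work to the V4L2 thread, then joins it.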
void VideoCaptureDeviceLinux::StopAndDeAllocate() {
if (!v4l2_thread_.IsRunning()) {
return;
}
v4l2_thread_.message_loop()->PostTask(
FROM_HERE,
base::Bind(&VideoCaptureDeviceLinux::OnStopAndDeAllocate,
base::Unretained(this)));
v4l2_thread_.Stop();
DeAllocateVideoBuffers();
}
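// Runs on v4l2_thread_: opens the device, negotiates format and frame rate,
// allocates the buffer pool, starts streaming and kicks off the capture loop.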
void VideoCaptureDeviceLinux::OnAllocateAndStart(int width,
int height,
int frame_rate,
scoped_ptr<Client> client) {
DCHECK_EQ(v4l2_thread_.message_loop(), base::MessageLoop::current());
client_ = client.Pass();
device_fd_.reset(HANDLE_EINTR(open(device_name_.id().c_str(), O_RDWR)));
if (!device_fd_.is_valid()) {
SetErrorState("Failed to open V4L2 device driver.");
return;
}
v4l2_capability cap;
if (!((HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYCAP, &cap)) == 0) &&
(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
!(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT))) {
device_fd_.reset();
SetErrorState("This is not a V4L2 video capture device");
return;
}
std::list<int> v4l2_formats;
GetListOfUsableFourCCs(width > kMjpegWidth || height > kMjpegHeight,
&v4l2_formats);
v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(v4l2_fmtdesc));
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
std::list<int>::iterator best = v4l2_formats.end();
while (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_ENUM_FMT, &fmtdesc)) ==
0) {
best = std::find(v4l2_formats.begin(), best, fmtdesc.pixelformat);
fmtdesc.index++;
}
if (best == v4l2_formats.end()) {
SetErrorState("Failed to find a supported camera format.");
return;
}
v4l2_format video_fmt;
memset(&video_fmt, 0, sizeof(video_fmt));
video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_fmt.fmt.pix.sizeimage = 0;
video_fmt.fmt.pix.width = width;
video_fmt.fmt.pix.height = height;
video_fmt.fmt.pix.pixelformat = *best;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_FMT, &video_fmt)) < 0) {
SetErrorState("Failed to set camera format");
return;
}
v4l2_streamparm streamparm;
memset(&streamparm, 0, sizeof(v4l2_streamparm));
streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
streamparm.parm.capture.timeperframe.numerator = 1;
streamparm.parm.capture.timeperframe.denominator =
(frame_rate) ? frame_rate : kTypicalFramerate;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_S_PARM, &streamparm)) <
0) {
SetErrorState("Failed to set camera framerate");
return;
}
DVLOG(2) << "Actual camera driverframerate: "
<< streamparm.parm.capture.timeperframe.denominator << "/"
<< streamparm.parm.capture.timeperframe.numerator;
}
}
capture_format_.frame_size.SetSize(video_fmt.fmt.pix.width,
video_fmt.fmt.pix.height);
capture_format_.frame_rate = frame_rate;
capture_format_.pixel_format =
V4l2ColorToVideoCaptureColorFormat(video_fmt.fmt.pix.pixelformat);
if (!AllocateVideoBuffers()) {
SetErrorState("Allocate buffer failed");
return;
}
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMON, &type)) == -1) {
SetErrorState("VIDIOC_STREAMON failed");
return;
}
state_ = kCapturing;
v4l2_thread_.message_loop()->PostTask(
FROM_HERE,
base::Bind(&VideoCaptureDeviceLinux::OnCaptureTask,
base::Unretained(this)));
}
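// Runs on v4l2_thread_: stops streaming and releases the buffers, the device
// file descriptor and the client.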
void VideoCaptureDeviceLinux::OnStopAndDeAllocate() {
DCHECK_EQ(v4l2_thread_.message_loop(), base::MessageLoop::current());
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_STREAMOFF, &type)) < 0) {
SetErrorState("VIDIOC_STREAMOFF failed");
return;
}
DeAllocateVideoBuffers();
device_fd_.reset();
state_ = kIdle;
client_.reset();
}
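// One iteration of the capture loop, running on v4l2_thread_: waits for a
// frame with select(), dequeues a filled buffer, forwards it to the client,
// requeues the buffer and reposts itself.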
void VideoCaptureDeviceLinux::OnCaptureTask() {
DCHECK_EQ(v4l2_thread_.message_loop(), base::MessageLoop::current());
if (state_ != kCapturing) {
return;
}
fd_set r_set;
FD_ZERO(&r_set);
FD_SET(device_fd_.get(), &r_set);
timeval timeout;
timeout.tv_sec = 0;
timeout.tv_usec = kCaptureTimeoutUs;
int result =
HANDLE_EINTR(select(device_fd_.get() + 1, &r_set, NULL, NULL, &timeout));
  if (result < 0) {
    // EINTR is a signal, not a real error.
    if (errno != EINTR) {
      SetErrorState("Select failed");
      return;
    }
    // Retry shortly and return so this iteration does not also repost itself
    // at the bottom of the function.
    v4l2_thread_.message_loop()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&VideoCaptureDeviceLinux::OnCaptureTask,
                   base::Unretained(this)),
        base::TimeDelta::FromMilliseconds(kCaptureSelectWaitMs));
    return;
  }
if (result == 0) {
timeout_count_++;
if (timeout_count_ >= kContinuousTimeoutLimit) {
SetErrorState(base::StringPrintf(
"Continuous timeout %d times", timeout_count_));
timeout_count_ = 0;
return;
}
} else {
timeout_count_ = 0;
}
if (FD_ISSET(device_fd_.get(), &r_set)) {
v4l2_buffer buffer;
memset(&buffer, 0, sizeof(buffer));
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_DQBUF, &buffer)) == 0) {
client_->OnIncomingCapturedData(
static_cast<uint8*>(buffer_pool_[buffer.index].start),
buffer.bytesused,
capture_format_,
0,
base::TimeTicks::Now());
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) == -1) {
SetErrorState(base::StringPrintf(
"Failed to enqueue capture buffer errno %d", errno));
}
} else {
SetErrorState(base::StringPrintf(
"Failed to dequeue capture buffer errno %d", errno));
return;
}
}
v4l2_thread_.message_loop()->PostTask(
FROM_HERE,
base::Bind(&VideoCaptureDeviceLinux::OnCaptureTask,
base::Unretained(this)));
}
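// Requests kMaxVideoBuffers memory-mapped buffers from the driver, maps them
// into buffer_pool_ and queues them for capture.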
bool VideoCaptureDeviceLinux::AllocateVideoBuffers() {
v4l2_requestbuffers r_buffer;
memset(&r_buffer, 0, sizeof(r_buffer));
r_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
r_buffer.memory = V4L2_MEMORY_MMAP;
r_buffer.count = kMaxVideoBuffers;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
return false;
}
if (r_buffer.count > kMaxVideoBuffers) {
r_buffer.count = kMaxVideoBuffers;
}
buffer_pool_size_ = r_buffer.count;
buffer_pool_ = new Buffer[r_buffer.count];
for (unsigned int i = 0; i < r_buffer.count; i++) {
v4l2_buffer buffer;
memset(&buffer, 0, sizeof(buffer));
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
return false;
}
buffer_pool_[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
MAP_SHARED, device_fd_.get(), buffer.m.offset);
if (buffer_pool_[i].start == MAP_FAILED) {
return false;
}
buffer_pool_[i].length = buffer.length;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_QBUF, &buffer)) < 0) {
return false;
}
}
return true;
}
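// Unmaps the buffer pool and asks the driver to release its buffers.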
void VideoCaptureDeviceLinux::DeAllocateVideoBuffers() {
if (!buffer_pool_)
return;
for (int i = 0; i < buffer_pool_size_; i++) {
munmap(buffer_pool_[i].start, buffer_pool_[i].length);
}
v4l2_requestbuffers r_buffer;
memset(&r_buffer, 0, sizeof(r_buffer));
r_buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
r_buffer.memory = V4L2_MEMORY_MMAP;
r_buffer.count = 0;
if (HANDLE_EINTR(ioctl(device_fd_.get(), VIDIOC_REQBUFS, &r_buffer)) < 0) {
SetErrorState("Failed to reset buf.");
}
delete [] buffer_pool_;
buffer_pool_ = NULL;
buffer_pool_size_ = 0;
}
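// Logs |reason|, moves the device into the error state and notifies the
// client.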
void VideoCaptureDeviceLinux::SetErrorState(const std::string& reason) {
DCHECK(!v4l2_thread_.IsRunning() ||
v4l2_thread_.message_loop() == base::MessageLoop::current());
DVLOG(1) << reason;
state_ = kError;
client_->OnError(reason);
}
}  // namespace media