This source file includes the following definitions:
- xioctl
- icvInitCapture_V4L
- try_init_v4l
- try_init_v4l2
- v4l2_free_ranges
- v4l2_add_ctrl_range
- v4l2_get_ctrl_default
- v4l2_get_ctrl_min
- v4l2_get_ctrl_max
- v4l2_scan_controls
- channels_for_mode
- _capture_V4L2
- _capture_V4L
- icvCaptureFromCAM_V4L
- read_frame_v4l2
- mainloop_v4l2
- icvGrabFrameCAM_V4L
- icvRetrieveFrameCAM_V4L
- icvGetPropertyCAM_V4L
- icvSetVideoSize
- icvSetControl
- icvSetPropertyCAM_V4L
- icvCloseCAM_V4L
- open
- close
- grabFrame
- retrieveFrame
- getProperty
- setProperty
- cvCreateCameraCapture_V4L
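/* Camera capture backend for Video4Linux (V4L1) and Video4Linux2 devices,
implemented on top of the libv4l compatibility libraries (libv4l1 / libv4l2). */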
#include "precomp.hpp"
#if !defined WIN32 && defined HAVE_LIBV4L
#define CLEAR(x) memset (&(x), 0, sizeof (x))
#include <stdio.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/types.h>
#include <sys/mman.h>
#include <string.h>
#include <stdlib.h>
#include <asm/types.h>
#include <assert.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#ifdef HAVE_CAMV4L
#include <linux/videodev.h>
#endif
#ifdef HAVE_CAMV4L2
#include <linux/videodev2.h>
#endif
#include <libv4l1.h>
#include <libv4l2.h>
#define DEFAULT_V4L_WIDTH 640
#define DEFAULT_V4L_HEIGHT 480
#define CHANNEL_NUMBER 1
#define MAX_CAMERAS 8
#define MAX_V4L_BUFFERS 10
#define DEFAULT_V4L_BUFFERS 4
#define USE_TEMP_BUFFER
#define MAX_DEVICE_DRIVER_NAME 80
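/* One memory-mapped frame buffer (address and length as returned by
VIDIOC_QUERYBUF / mmap). The extra slot at index MAX_V4L_BUFFERS in the capture
structure holds the malloc'ed copy used when USE_TEMP_BUFFER is defined. */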
struct buffer
{
void * start;
size_t length;
};
static unsigned int n_buffers = 0;
typedef struct v4l2_ctrl_range {
__u32 ctrl_id;
__s32 initial_value;
__s32 current_value;
__s32 minimum;
__s32 maximum;
__s32 default_value;
} v4l2_ctrl_range;
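/* Per-device capture state. V4L1 fields (capability, captureWindow, imageProperties,
memoryBuffer, mmaps, memoryMap) and V4L2 fields (cap, form, req, buffers, cached
control ranges) coexist; is_v4l2_device records which API is driving the device. */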
typedef struct CvCaptureCAM_V4L
{
char* deviceName;
int deviceHandle;
int bufferIndex;
int FirstCapture;
int width; int height;
int mode;
struct video_capability capability;
struct video_window captureWindow;
struct video_picture imageProperties;
struct video_mbuf memoryBuffer;
struct video_mmap *mmaps;
char *memoryMap;
IplImage frame;
buffer buffers[MAX_V4L_BUFFERS + 1];
struct v4l2_capability cap;
struct v4l2_input inp;
struct v4l2_format form;
struct v4l2_crop crop;
struct v4l2_cropcap cropcap;
struct v4l2_requestbuffers req;
struct v4l2_jpegcompression compr;
struct v4l2_control control;
enum v4l2_buf_type type;
struct v4l2_queryctrl queryctrl;
struct timeval timestamp;
int sequence;
v4l2_ctrl_range** v4l2_ctrl_ranges;
int v4l2_ctrl_count;
int is_v4l2_device;
}
CvCaptureCAM_V4L;
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture );
static int icvGrabFrameCAM_V4L( CvCaptureCAM_V4L* capture );
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int );
CvCapture* cvCreateCameraCapture_V4L( int index );
static double icvGetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id );
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id, double value );
static int icvSetVideoSize( CvCaptureCAM_V4L* capture, int w, int h);
static int numCameras = 0;
static int indexList = 0;
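/* ioctl wrapper that retries the request while it is interrupted by a signal (EINTR). */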
#ifdef HAVE_IOCTL_ULONG
static int xioctl( int fd, unsigned long request, void *arg)
#else
static int xioctl( int fd, int request, void *arg)
#endif
{
int r;
do r = v4l2_ioctl (fd, request, arg);
while (-1 == r && EINTR == errno);
return r;
}
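/* Probe /dev/video0 .. /dev/video7; every node that can be opened is recorded in
the indexList bitmask and counted in numCameras. */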
static void icvInitCapture_V4L() {
int deviceHandle;
int CameraNumber;
char deviceName[MAX_DEVICE_DRIVER_NAME];
CameraNumber = 0;
while(CameraNumber < MAX_CAMERAS) {
sprintf(deviceName, "/dev/video%1d", CameraNumber);
deviceHandle = open(deviceName, O_RDONLY);
if (deviceHandle != -1) {
indexList|=(1 << CameraNumber);
numCameras++;
}
if (deviceHandle != -1)
close(deviceHandle);
CameraNumber++;
}
};
static int try_init_v4l(CvCaptureCAM_V4L* capture, char *deviceName)
{
int detect = 0;
capture->deviceHandle = v4l1_open(deviceName, O_RDWR);
if (capture->deviceHandle < 0)
{
detect = -1;
icvCloseCAM_V4L(capture);
}
if (detect == 0)
{
if (v4l1_ioctl(capture->deviceHandle, VIDIOCGCAP, &capture->capability) < 0)
{
detect = 0;
icvCloseCAM_V4L(capture);
}
else
{
detect = 1;
}
}
return detect;
}
static int try_init_v4l2(CvCaptureCAM_V4L* capture, char *deviceName)
{
int detect = 0;
capture->deviceHandle = v4l2_open (deviceName, O_RDWR | O_NONBLOCK, 0);
if (capture->deviceHandle < 0)
{
detect = -1;
icvCloseCAM_V4L(capture);
}
if (detect == 0)
{
CLEAR (capture->cap);
if (-1 == xioctl (capture->deviceHandle, VIDIOC_QUERYCAP, &capture->cap))
{
detect = 0;
icvCloseCAM_V4L(capture);
}
else
{
CLEAR (capture->capability);
capture->capability.type = capture->cap.capabilities;
if (-1 != xioctl (capture->deviceHandle, VIDIOC_G_INPUT, &capture->capability.channels))
{
detect = 1;
}
}
}
return detect;
}
static void v4l2_free_ranges(CvCaptureCAM_V4L* capture) {
int i;
if (capture->v4l2_ctrl_ranges != NULL) {
for (i = 0; i < capture->v4l2_ctrl_count; i++) {
free(capture->v4l2_ctrl_ranges[i]);
}
}
free(capture->v4l2_ctrl_ranges);
capture->v4l2_ctrl_count = 0;
capture->v4l2_ctrl_ranges = NULL;
}
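/* Append one control's current value and queried range to the cache; the array is
grown by one slot after every append so the next control always has room. */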
static void v4l2_add_ctrl_range(CvCaptureCAM_V4L* capture, v4l2_control* ctrl) {
v4l2_ctrl_range* range = (v4l2_ctrl_range*)malloc(sizeof(v4l2_ctrl_range));
range->ctrl_id = ctrl->id;
range->initial_value = ctrl->value;
range->current_value = ctrl->value;
range->minimum = capture->queryctrl.minimum;
range->maximum = capture->queryctrl.maximum;
range->default_value = capture->queryctrl.default_value;
capture->v4l2_ctrl_ranges[capture->v4l2_ctrl_count] = range;
capture->v4l2_ctrl_count += 1;
capture->v4l2_ctrl_ranges = (v4l2_ctrl_range**)realloc((v4l2_ctrl_range**)capture->v4l2_ctrl_ranges, (capture->v4l2_ctrl_count + 1) * sizeof(v4l2_ctrl_range*));
}
static int v4l2_get_ctrl_default(CvCaptureCAM_V4L* capture, __u32 id) {
int i;
for (i = 0; i < capture->v4l2_ctrl_count; i++) {
if (id == capture->v4l2_ctrl_ranges[i]->ctrl_id) {
return capture->v4l2_ctrl_ranges[i]->default_value;
}
}
return -1;
}
static int v4l2_get_ctrl_min(CvCaptureCAM_V4L* capture, __u32 id) {
int i;
for (i = 0; i < capture->v4l2_ctrl_count; i++) {
if (id == capture->v4l2_ctrl_ranges[i]->ctrl_id) {
return capture->v4l2_ctrl_ranges[i]->minimum;
}
}
return -1;
}
static int v4l2_get_ctrl_max(CvCaptureCAM_V4L* capture, __u32 id) {
int i;
for (i = 0; i < capture->v4l2_ctrl_count; i++) {
if (id == capture->v4l2_ctrl_ranges[i]->ctrl_id) {
return capture->v4l2_ctrl_ranges[i]->maximum;
}
}
return -1;
}
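/* Enumerate the device's controls, preferring V4L2_CTRL_FLAG_NEXT_CTRL and falling
back to walking the standard and driver-private control ID ranges; the range of
every usable integer/boolean/menu control is cached for later normalization. */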
static void v4l2_scan_controls(CvCaptureCAM_V4L* capture) {
__u32 ctrl_id;
struct v4l2_control c;
if (capture->v4l2_ctrl_ranges != NULL) {
v4l2_free_ranges(capture);
}
capture->v4l2_ctrl_ranges = (v4l2_ctrl_range**)malloc(sizeof(v4l2_ctrl_range*));
#ifdef V4L2_CTRL_FLAG_NEXT_CTRL
capture->queryctrl.id = V4L2_CTRL_FLAG_NEXT_CTRL;
if(0 == v4l2_ioctl (capture->deviceHandle, VIDIOC_QUERYCTRL, &capture->queryctrl)) {
do {
c.id = capture->queryctrl.id;
capture->queryctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL;
if(capture->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
continue;
}
if(capture->queryctrl.type != V4L2_CTRL_TYPE_INTEGER &&
capture->queryctrl.type != V4L2_CTRL_TYPE_BOOLEAN &&
capture->queryctrl.type != V4L2_CTRL_TYPE_MENU) {
continue;
}
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_G_CTRL, &c) == 0) {
v4l2_add_ctrl_range(capture, &c);
}
} while(0 == v4l2_ioctl (capture->deviceHandle, VIDIOC_QUERYCTRL, &capture->queryctrl));
} else
#endif
{
for(ctrl_id=V4L2_CID_BASE; ctrl_id<V4L2_CID_LASTP1; ctrl_id++) {
capture->queryctrl.id = ctrl_id;
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_QUERYCTRL, &capture->queryctrl) == 0) {
if(capture->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
continue;
}
if(capture->queryctrl.type != V4L2_CTRL_TYPE_INTEGER &&
capture->queryctrl.type != V4L2_CTRL_TYPE_BOOLEAN &&
capture->queryctrl.type != V4L2_CTRL_TYPE_MENU) {
continue;
}
c.id = ctrl_id;
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_G_CTRL, &c) == 0) {
v4l2_add_ctrl_range(capture, &c);
}
}
}
for(ctrl_id=V4L2_CID_PRIVATE_BASE; ; ctrl_id++) {
capture->queryctrl.id = ctrl_id;
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_QUERYCTRL, &capture->queryctrl) == 0) {
if(capture->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
continue;
}
if(capture->queryctrl.type != V4L2_CTRL_TYPE_INTEGER &&
capture->queryctrl.type != V4L2_CTRL_TYPE_BOOLEAN &&
capture->queryctrl.type != V4L2_CTRL_TYPE_MENU) {
continue;
}
c.id = ctrl_id;
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_G_CTRL, &c) == 0) {
v4l2_add_ctrl_range(capture, &c);
}
} else {
break;
}
}
}
}
static inline int channels_for_mode(int mode)
{
switch(mode) {
case CV_CAP_MODE_GRAY:
return 1;
case CV_CAP_MODE_YUYV:
return 2;
default:
return 3;
}
}
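/* V4L2 initialization: query capabilities and controls, pick the pixel format that
matches capture->mode, negotiate it with VIDIOC_S_FMT, request and mmap the
streaming buffers (retrying with fewer buffers on shortage) and set up the frame. */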
static int _capture_V4L2 (CvCaptureCAM_V4L *capture, char *deviceName)
{
int detect_v4l2 = 0;
capture->deviceName = strdup(deviceName);
detect_v4l2 = try_init_v4l2(capture, deviceName);
if (detect_v4l2 != 1) {
return -1;
}
capture->is_v4l2_device = 1;
capture->v4l2_ctrl_ranges = NULL;
capture->v4l2_ctrl_count = 0;
v4l2_scan_controls(capture);
if ((capture->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: device %s is unable to capture video memory.\n",deviceName);
icvCloseCAM_V4L(capture);
return -1;
}
if(capture->inp.index > 0) {
CLEAR (capture->inp);
capture->inp.index = CHANNEL_NUMBER;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp))
{
fprintf (stderr, "VIDEOIO ERROR: V4L2: Aren't able to set channel number\n");
icvCloseCAM_V4L (capture);
return -1;
}
}
CLEAR (capture->form);
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
unsigned int requestedPixelFormat;
switch (capture->mode) {
case CV_CAP_MODE_RGB:
requestedPixelFormat = V4L2_PIX_FMT_RGB24;
break;
case CV_CAP_MODE_GRAY:
requestedPixelFormat = V4L2_PIX_FMT_YUV420;
break;
case CV_CAP_MODE_YUYV:
requestedPixelFormat = V4L2_PIX_FMT_YUYV;
break;
default:
requestedPixelFormat = V4L2_PIX_FMT_BGR24;
break;
}
CLEAR (capture->form);
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
capture->form.fmt.pix.pixelformat = requestedPixelFormat;
capture->form.fmt.pix.field = V4L2_FIELD_ANY;
capture->form.fmt.pix.width = capture->width;
capture->form.fmt.pix.height = capture->height;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_S_FMT, &capture->form)) {
fprintf(stderr, "VIDEOIO ERROR: libv4l unable to ioctl S_FMT\n");
return -1;
}
if (requestedPixelFormat != capture->form.fmt.pix.pixelformat) {
fprintf( stderr, "VIDEOIO ERROR: libv4l unable convert to requested pixfmt\n");
return -1;
}
unsigned int min;
min = capture->form.fmt.pix.width * 2;
if (capture->form.fmt.pix.bytesperline < min)
capture->form.fmt.pix.bytesperline = min;
min = capture->form.fmt.pix.bytesperline * capture->form.fmt.pix.height;
if (capture->form.fmt.pix.sizeimage < min)
capture->form.fmt.pix.sizeimage = min;
CLEAR (capture->req);
unsigned int buffer_number = DEFAULT_V4L_BUFFERS;
try_again:
capture->req.count = buffer_number;
capture->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
capture->req.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_REQBUFS, &capture->req))
{
if (EINVAL == errno)
{
fprintf (stderr, "%s does not support memory mapping\n", deviceName);
} else {
perror ("VIDIOC_REQBUFS");
}
icvCloseCAM_V4L (capture);
return -1;
}
if (capture->req.count < buffer_number)
{
if (buffer_number == 1)
{
fprintf (stderr, "Insufficient buffer memory on %s\n", deviceName);
icvCloseCAM_V4L (capture);
return -1;
} else {
buffer_number--;
fprintf (stderr, "Insufficient buffer memory on %s -- decreaseing buffers\n", deviceName);
goto try_again;
}
}
for (n_buffers = 0; n_buffers < capture->req.count; ++n_buffers)
{
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_QUERYBUF, &buf)) {
perror ("VIDIOC_QUERYBUF");
icvCloseCAM_V4L (capture);
return -1;
}
capture->buffers[n_buffers].length = buf.length;
capture->buffers[n_buffers].start =
v4l2_mmap (NULL ,
buf.length,
PROT_READ | PROT_WRITE ,
MAP_SHARED ,
capture->deviceHandle, buf.m.offset);
if (MAP_FAILED == capture->buffers[n_buffers].start) {
perror ("mmap");
icvCloseCAM_V4L (capture);
return -1;
}
#ifdef USE_TEMP_BUFFER
if (n_buffers == 0) {
if (capture->buffers[MAX_V4L_BUFFERS].start) {
free(capture->buffers[MAX_V4L_BUFFERS].start);
capture->buffers[MAX_V4L_BUFFERS].start = NULL;
}
capture->buffers[MAX_V4L_BUFFERS].start = malloc(buf.length);
capture->buffers[MAX_V4L_BUFFERS].length = buf.length;
};
#endif
}
cvInitImageHeader( &capture->frame,
cvSize( capture->form.fmt.pix.width,
capture->form.fmt.pix.height ),
IPL_DEPTH_8U, channels_for_mode(capture->mode),
IPL_ORIGIN_TL, 4 );
capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
return 1;
};
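/* V4L1 fallback initialization: query capability, channel, capture window and picture
properties, select a palette matching capture->mode and mmap the driver's frame
buffer as reported by VIDIOCGMBUF. */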
static int _capture_V4L (CvCaptureCAM_V4L *capture, char *deviceName)
{
int detect_v4l = 0;
detect_v4l = try_init_v4l(capture, deviceName);
if (detect_v4l == -1)
{
fprintf (stderr, "VIDEOIO ERROR: V4L"
": device %s: Unable to open for READ ONLY\n", deviceName);
return -1;
}
if (detect_v4l <= 0)
{
fprintf (stderr, "VIDEOIO ERROR: V4L"
": device %s: Unable to query number of channels\n", deviceName);
return -1;
}
{
if ((capture->capability.type & VID_TYPE_CAPTURE) == 0) {
fprintf( stderr, "VIDEOIO ERROR: V4L: "
"device %s is unable to capture video memory.\n",deviceName);
icvCloseCAM_V4L(capture);
return -1;
}
}
{
if(capture->capability.channels>0) {
struct video_channel selectedChannel;
selectedChannel.channel=CHANNEL_NUMBER;
if (v4l1_ioctl(capture->deviceHandle, VIDIOCGCHAN , &selectedChannel) != -1) {
if (v4l1_ioctl(capture->deviceHandle, VIDIOCSCHAN , &selectedChannel) == -1) {
}
}
}
}
{
if(v4l1_ioctl(capture->deviceHandle, VIDIOCGWIN, &capture->captureWindow) == -1) {
fprintf( stderr, "VIDEOIO ERROR: V4L: "
"Could not obtain specifics of capture window.\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
}
{
if(v4l1_ioctl(capture->deviceHandle, VIDIOCGPICT, &capture->imageProperties) < 0) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Unable to determine size of incoming image\n");
icvCloseCAM_V4L(capture);
return -1;
}
int requestedVideoPalette;
int depth;
switch (capture->mode) {
case CV_CAP_MODE_GRAY:
requestedVideoPalette = VIDEO_PALETTE_YUV420;
depth = 8;
break;
case CV_CAP_MODE_YUYV:
requestedVideoPalette = VIDEO_PALETTE_YUYV;
depth = 16;
break;
default:
requestedVideoPalette = VIDEO_PALETTE_RGB24;
depth = 24;
break;
}
capture->imageProperties.depth = depth;
capture->imageProperties.palette = requestedVideoPalette;
if (v4l1_ioctl(capture->deviceHandle, VIDIOCSPICT, &capture->imageProperties) < 0) {
fprintf( stderr, "VIDEOIO ERROR: libv4l unable to ioctl VIDIOCSPICT\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
if (v4l1_ioctl(capture->deviceHandle, VIDIOCGPICT, &capture->imageProperties) < 0) {
fprintf( stderr, "VIDEOIO ERROR: libv4l unable to ioctl VIDIOCGPICT\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
if (capture->imageProperties.palette != requestedVideoPalette) {
fprintf( stderr, "VIDEOIO ERROR: libv4l unable convert to requested pixfmt\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
}
{
v4l1_ioctl(capture->deviceHandle, VIDIOCGMBUF, &capture->memoryBuffer);
capture->memoryMap = (char *)v4l1_mmap(0,
capture->memoryBuffer.size,
PROT_READ | PROT_WRITE,
MAP_SHARED,
capture->deviceHandle,
0);
if (capture->memoryMap == MAP_FAILED) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Mapping Memmory from video source error: %s\n", strerror(errno));
icvCloseCAM_V4L(capture);
return -1;
}
capture->mmaps = (struct video_mmap *)
(malloc(capture->memoryBuffer.frames * sizeof(struct video_mmap)));
if (!capture->mmaps) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Could not memory map video frames.\n");
icvCloseCAM_V4L(capture);
return -1;
}
}
cvInitImageHeader( &capture->frame,
cvSize( capture->captureWindow.width,
capture->captureWindow.height ),
IPL_DEPTH_8U, channels_for_mode(capture->mode),
IPL_ORIGIN_TL, 4 );
capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
return 1;
};
static CvCaptureCAM_V4L * icvCaptureFromCAM_V4L (int index)
{
static int autoindex;
autoindex = 0;
char deviceName[MAX_DEVICE_DRIVER_NAME];
if (!numCameras)
icvInitCapture_V4L();
if (!numCameras)
return NULL;
if ( (index>-1) && ! ((1 << index) & indexList) )
{
fprintf( stderr, "VIDEOIO ERROR: V4L: index %d is not correct!\n",index);
return NULL;
}
CvCaptureCAM_V4L * capture = (CvCaptureCAM_V4L*)cvAlloc(sizeof(CvCaptureCAM_V4L));
if (!capture) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Could not allocate memory for capture process.\n");
return NULL;
}
#ifdef USE_TEMP_BUFFER
capture->buffers[MAX_V4L_BUFFERS].start = NULL;
#endif
if (index<0) {
for (; autoindex<MAX_CAMERAS;autoindex++)
if (indexList & (1<<autoindex))
break;
if (autoindex==MAX_CAMERAS)
return NULL;
index=autoindex;
autoindex++;
}
sprintf(deviceName, "/dev/video%1d", index);
memset(capture,0,sizeof(CvCaptureCAM_V4L));
capture->FirstCapture = 1;
capture->width = DEFAULT_V4L_WIDTH;
capture->height = DEFAULT_V4L_HEIGHT;
if (_capture_V4L2 (capture, deviceName) == -1) {
icvCloseCAM_V4L(capture);
capture->is_v4l2_device = 0;
if (_capture_V4L (capture, deviceName) == -1) {
icvCloseCAM_V4L(capture);
return NULL;
}
} else {
capture->is_v4l2_device = 1;
}
return capture;
};
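/* Dequeue one filled buffer; EAGAIN simply means no frame is ready on the non-blocking
handle. The payload is copied into the spare temp buffer (when USE_TEMP_BUFFER is
defined) so the driver buffer can be requeued immediately; timestamp and sequence
are kept for the property getters. */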
static int read_frame_v4l2(CvCaptureCAM_V4L* capture) {
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_DQBUF, &buf)) {
switch (errno) {
case EAGAIN:
return 0;
case EIO:
default:
perror ("VIDIOC_DQBUF");
return 1;
}
}
assert(buf.index < capture->req.count);
#ifdef USE_TEMP_BUFFER
memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
capture->buffers[buf.index].start,
capture->buffers[MAX_V4L_BUFFERS].length );
capture->bufferIndex = MAX_V4L_BUFFERS;
#else
capture->bufferIndex = buf.index;
#endif
if (-1 == xioctl (capture->deviceHandle, VIDIOC_QBUF, &buf))
perror ("VIDIOC_QBUF");
capture->timestamp = buf.timestamp;
capture->sequence = buf.sequence;
return 1;
}
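/* Wait (select with a 10 second timeout) until the device is readable, retrying on
EINTR, and read one frame. */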
static void mainloop_v4l2(CvCaptureCAM_V4L* capture) {
unsigned int count;
count = 1;
while (count-- > 0) {
for (;;) {
fd_set fds;
struct timeval tv;
int r;
FD_ZERO (&fds);
FD_SET (capture->deviceHandle, &fds);
tv.tv_sec = 10;
tv.tv_usec = 0;
r = select (capture->deviceHandle+1, &fds, NULL, NULL, &tv);
if (-1 == r) {
if (EINTR == errno)
continue;
perror ("select");
}
if (0 == r) {
fprintf (stderr, "select timeout\n");
break;
}
if (read_frame_v4l2 (capture))
break;
}
}
}
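/* First grab: queue every buffer and start V4L2 streaming, or prime VIDIOCMCAPTURE on
all but the last V4L1 frame. Later grabs fetch one V4L2 frame via mainloop_v4l2() or
start the next V4L1 capture and advance the buffer index. */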
static int icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) {
if (capture->FirstCapture) {
if (capture->is_v4l2_device == 1)
{
for (capture->bufferIndex = 0;
capture->bufferIndex < ((int)capture->req.count);
++capture->bufferIndex)
{
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = (unsigned long)capture->bufferIndex;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) {
perror ("VIDIOC_QBUF");
return 0;
}
}
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_STREAMON,
&capture->type)) {
perror ("VIDIOC_STREAMON");
return 0;
}
} else
{
for (capture->bufferIndex = 0;
capture->bufferIndex < (capture->memoryBuffer.frames-1);
++capture->bufferIndex) {
capture->mmaps[capture->bufferIndex].frame = capture->bufferIndex;
capture->mmaps[capture->bufferIndex].width = capture->captureWindow.width;
capture->mmaps[capture->bufferIndex].height = capture->captureWindow.height;
capture->mmaps[capture->bufferIndex].format = capture->imageProperties.palette;
if (v4l1_ioctl(capture->deviceHandle, VIDIOCMCAPTURE, &capture->mmaps[capture->bufferIndex]) == -1) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Initial Capture Error: Unable to load initial memory buffers.\n");
return 0;
}
}
}
capture->FirstCapture = 0;
}
if (capture->is_v4l2_device == 1)
{
mainloop_v4l2(capture);
} else
{
capture->mmaps[capture->bufferIndex].frame = capture->bufferIndex;
capture->mmaps[capture->bufferIndex].width = capture->captureWindow.width;
capture->mmaps[capture->bufferIndex].height = capture->captureWindow.height;
capture->mmaps[capture->bufferIndex].format = capture->imageProperties.palette;
if (v4l1_ioctl (capture->deviceHandle, VIDIOCMCAPTURE,
&capture->mmaps[capture->bufferIndex]) == -1) {
return 1;
}
++capture->bufferIndex;
if (capture->bufferIndex == capture->memoryBuffer.frames) {
capture->bufferIndex = 0;
}
}
return(1);
}
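/* Synchronize with the driver (VIDIOCSYNC) in the V4L1 case, reallocate the IplImage
if the negotiated frame size changed, then copy the captured buffer into
frame.imageData. */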
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) {
if (capture->is_v4l2_device == 0)
{
if (v4l1_ioctl(capture->deviceHandle, VIDIOCSYNC, &capture->mmaps[capture->bufferIndex].frame) == -1) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Could not SYNC to video stream. %s\n", strerror(errno));
}
}
if (capture->is_v4l2_device == 1)
{
if(((unsigned long)capture->frame.width != capture->form.fmt.pix.width)
|| ((unsigned long)capture->frame.height != capture->form.fmt.pix.height)) {
cvFree(&capture->frame.imageData);
cvInitImageHeader( &capture->frame,
cvSize( capture->form.fmt.pix.width,
capture->form.fmt.pix.height ),
IPL_DEPTH_8U, channels_for_mode(capture->mode),
IPL_ORIGIN_TL, 4 );
capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
}
} else
{
if((capture->frame.width != capture->mmaps[capture->bufferIndex].width)
|| (capture->frame.height != capture->mmaps[capture->bufferIndex].height)) {
cvFree(&capture->frame.imageData);
cvInitImageHeader( &capture->frame,
cvSize( capture->captureWindow.width,
capture->captureWindow.height ),
IPL_DEPTH_8U, channels_for_mode(capture->mode),
IPL_ORIGIN_TL, 4 );
capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
}
}
if (capture->is_v4l2_device == 1)
{
if(capture->buffers[capture->bufferIndex].start){
memcpy((char *)capture->frame.imageData,
(char *)capture->buffers[capture->bufferIndex].start,
capture->frame.imageSize);
}
} else
{
switch(capture->imageProperties.palette) {
case VIDEO_PALETTE_RGB24:
case VIDEO_PALETTE_YUV420:
case VIDEO_PALETTE_YUYV:
memcpy((char *)capture->frame.imageData,
(char *)(capture->memoryMap + capture->memoryBuffer.offsets[capture->bufferIndex]),
capture->frame.imageSize);
break;
default:
fprintf( stderr,
"VIDEOIO ERROR: V4L: Cannot convert from palette %d to mode %d\n",
capture->imageProperties.palette,
capture->mode);
return 0;
}
}
return(&capture->frame);
}
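/* Frame size, timestamp, frame counter, FPS and mode are read directly; every other
property is mapped to a V4L2 control and returned normalized to [0,1] using the
cached control range, with a V4L1 imageProperties fallback scaled by 0xFFFF. */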
static double icvGetPropertyCAM_V4L (CvCaptureCAM_V4L* capture,
int property_id ) {
char name[32];
int is_v4l2_device = 0;
switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH:
case CV_CAP_PROP_FRAME_HEIGHT:
CLEAR (capture->form);
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) {
perror ("VIDIOC_G_FMT");
if (v4l1_ioctl (capture->deviceHandle, VIDIOCGWIN, &capture->captureWindow) < 0) {
fprintf (stderr, " ERROR: V4L: Unable to determine size of incoming image\n");
icvCloseCAM_V4L(capture);
return -1;
} else {
int retval = (property_id == CV_CAP_PROP_FRAME_WIDTH)?capture->captureWindow.width:capture->captureWindow.height;
return retval;
}
}
return (property_id == CV_CAP_PROP_FRAME_WIDTH)?capture->form.fmt.pix.width:capture->form.fmt.pix.height;
case CV_CAP_PROP_POS_MSEC:
if (capture->FirstCapture) {
return 0;
} else {
return 1000 * capture->timestamp.tv_sec + ((double) capture->timestamp.tv_usec) / 1000;
}
break;
case CV_CAP_PROP_POS_FRAMES:
return capture->sequence;
break;
case CV_CAP_PROP_FPS: {
struct v4l2_streamparm sp;
memset (&sp, 0, sizeof(struct v4l2_streamparm));
sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl (capture->deviceHandle, VIDIOC_G_PARM, &sp) < 0){
fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to get camera FPS\n");
return (double) -1;
}
double framesPerSec = sp.parm.capture.timeperframe.denominator / (double) sp.parm.capture.timeperframe.numerator ;
return framesPerSec;
}
break;
case CV_CAP_PROP_MODE:
return capture->mode;
break;
case CV_CAP_PROP_BRIGHTNESS:
sprintf(name, "Brightness");
capture->control.id = V4L2_CID_BRIGHTNESS;
break;
case CV_CAP_PROP_CONTRAST:
sprintf(name, "Contrast");
capture->control.id = V4L2_CID_CONTRAST;
break;
case CV_CAP_PROP_SATURATION:
sprintf(name, "Saturation");
capture->control.id = V4L2_CID_SATURATION;
break;
case CV_CAP_PROP_HUE:
sprintf(name, "Hue");
capture->control.id = V4L2_CID_HUE;
break;
case CV_CAP_PROP_GAIN:
sprintf(name, "Gain");
capture->control.id = V4L2_CID_GAIN;
break;
case CV_CAP_PROP_EXPOSURE:
sprintf(name, "Exposure");
capture->control.id = V4L2_CID_EXPOSURE;
break;
default:
sprintf(name, "<unknown property string>");
capture->control.id = property_id;
}
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_G_CTRL, &capture->control) == 0) {
is_v4l2_device = 1;
} else {
fprintf(stderr, "VIDEOIO ERROR: V4L2: Unable to get property %s(%u) - %s\n", name, capture->control.id, strerror(errno));
}
if (is_v4l2_device == 1) {
int v4l2_min = v4l2_get_ctrl_min(capture, capture->control.id);
int v4l2_max = v4l2_get_ctrl_max(capture, capture->control.id);
if ((v4l2_min == -1) && (v4l2_max == -1)) {
fprintf(stderr, "VIDEOIO ERROR: V4L2: Property %s(%u) not supported by device\n", name, property_id);
return -1;
}
return ((float)capture->control.value - v4l2_min) / (v4l2_max - v4l2_min);
} else {
int retval = -1;
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
retval = capture->imageProperties.brightness;
break;
case CV_CAP_PROP_CONTRAST:
retval = capture->imageProperties.contrast;
break;
case CV_CAP_PROP_SATURATION:
retval = capture->imageProperties.colour;
break;
case CV_CAP_PROP_HUE:
retval = capture->imageProperties.hue;
break;
case CV_CAP_PROP_GAIN:
fprintf(stderr, "VIDEOIO ERROR: V4L: Gain control in V4L is not supported\n");
return -1;
break;
case CV_CAP_PROP_EXPOSURE:
fprintf(stderr, "VIDEOIO ERROR: V4L: Exposure control in V4L is not supported\n");
return -1;
break;
}
if (retval == -1) {
return -1;
}
return (float)retval / 0xFFFF;
}
}
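/* For V4L2 the device is closed and reopened through _capture_V4L2() so the new size
can be renegotiated (crop, format and a 30 fps default are then applied); for V4L1
the requested size is clamped to the driver maximum and set with VIDIOCSWIN. */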
static int icvSetVideoSize( CvCaptureCAM_V4L* capture, int w, int h) {
if (capture->is_v4l2_device == 1)
{
char deviceName[MAX_DEVICE_DRIVER_NAME];
sprintf(deviceName, "%s", capture->deviceName);
icvCloseCAM_V4L(capture);
_capture_V4L2(capture, deviceName);
int cropHeight;
int cropWidth;
switch (capture->mode) {
case CV_CAP_MODE_GRAY:
cropHeight = h*8;
cropWidth = w*8;
break;
case CV_CAP_MODE_YUYV:
cropHeight = h*16;
cropWidth = w*16;
break;
default:
cropHeight = h*24;
cropWidth = w*24;
break;
}
CLEAR (capture->crop);
capture->crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
capture->crop.c.left = 0;
capture->crop.c.top = 0;
capture->crop.c.height = cropHeight;
capture->crop.c.width = cropWidth;
xioctl (capture->deviceHandle, VIDIOC_S_CROP, &capture->crop);
CLEAR (capture->form);
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
xioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form);
capture->form.fmt.pix.width = w;
capture->form.fmt.pix.height = h;
capture->form.fmt.pix.field = V4L2_FIELD_ANY;
xioctl (capture->deviceHandle, VIDIOC_S_FMT, &capture->form);
struct v4l2_streamparm setfps;
memset (&setfps, 0, sizeof(struct v4l2_streamparm));
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.parm.capture.timeperframe.numerator = 1;
setfps.parm.capture.timeperframe.denominator = 30;
xioctl (capture->deviceHandle, VIDIOC_S_PARM, &setfps);
capture->FirstCapture = 1;
if (-1 == xioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form))
{
fprintf(stderr, "VIDEOIO ERROR: V4L/V4L2: Could not obtain specifics of capture window.\n\n");
icvCloseCAM_V4L(capture);
return 0;
}
return 0;
} else
{
if (capture==0) return 0;
if (w>capture->capability.maxwidth) {
w=capture->capability.maxwidth;
}
if (h>capture->capability.maxheight) {
h=capture->capability.maxheight;
}
capture->captureWindow.width=w;
capture->captureWindow.height=h;
if (v4l1_ioctl(capture->deviceHandle, VIDIOCSWIN, &capture->captureWindow) < 0) {
icvCloseCAM_V4L(capture);
return 0;
}
if (v4l1_ioctl(capture->deviceHandle, VIDIOCGWIN, &capture->captureWindow) < 0) {
icvCloseCAM_V4L(capture);
return 0;
}
capture->FirstCapture = 1;
}
return 0;
}
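/* Map the normalized [0,1] value onto the control's cached [minimum, maximum] range
and apply it with VIDIOC_S_CTRL; if that fails, fall back to the V4L1 VIDIOCSPICT
picture properties scaled to the 0..0xFFFF range. */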
static int icvSetControl (CvCaptureCAM_V4L* capture, int property_id, double value) {
struct v4l2_control c;
__s32 ctrl_value;
char name[32];
int is_v4l2 = 1;
int v4l2_min = 0;
int v4l2_max = 255;
if (capture->v4l2_ctrl_ranges == NULL) {
v4l2_scan_controls(capture);
}
CLEAR (capture->control);
CLEAR (capture->queryctrl);
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
sprintf(name, "Brightness");
capture->control.id = V4L2_CID_BRIGHTNESS;
break;
case CV_CAP_PROP_CONTRAST:
sprintf(name, "Contrast");
capture->control.id = V4L2_CID_CONTRAST;
break;
case CV_CAP_PROP_SATURATION:
sprintf(name, "Saturation");
capture->control.id = V4L2_CID_SATURATION;
break;
case CV_CAP_PROP_HUE:
sprintf(name, "Hue");
capture->control.id = V4L2_CID_HUE;
break;
case CV_CAP_PROP_GAIN:
sprintf(name, "Gain");
capture->control.id = V4L2_CID_GAIN;
break;
case CV_CAP_PROP_EXPOSURE:
sprintf(name, "Exposure");
capture->control.id = V4L2_CID_EXPOSURE;
break;
default:
sprintf(name, "<unknown property string>");
capture->control.id = property_id;
}
v4l2_min = v4l2_get_ctrl_min(capture, capture->control.id);
v4l2_max = v4l2_get_ctrl_max(capture, capture->control.id);
if ((v4l2_min == -1) && (v4l2_max == -1)) {
fprintf(stderr, "VIDEOIO ERROR: V4L: Property %s(%u) not supported by device\n", name, property_id);
return -1;
}
if(v4l2_ioctl(capture->deviceHandle, VIDIOC_G_CTRL, &capture->control) == 0) {
} else {
fprintf(stderr, "VIDEOIO ERROR: V4L2: Unable to get property %s(%u) - %s\n", name, capture->control.id, strerror(errno));
}
if (v4l2_max != 0) {
double val = value;
if (value < 0.0) {
val = 0.0;
} else if (value > 1.0) {
val = 1.0;
}
ctrl_value = val * (double)(v4l2_max - v4l2_min) + v4l2_min;
} else {
ctrl_value = v4l2_get_ctrl_default(capture, capture->control.id) * (double)(v4l2_max - v4l2_min) + v4l2_min;
}
c.id = capture->control.id;
c.value = ctrl_value;
if (v4l2_ioctl(capture->deviceHandle, VIDIOC_S_CTRL, &c) != 0) {
if (errno != ERANGE) {
fprintf(stderr, "VIDEOIO ERROR: V4L2: Failed to set control \"%d\": %s (value %d)\n", c.id, strerror(errno), c.value);
is_v4l2 = 0;
} else {
return 0;
}
} else {
return 0;
}
if (is_v4l2 == 0) {
fprintf(stderr, "VIDEOIO WARNING: Setting property %u through v4l2 failed. Trying with v4l1.\n", c.id);
int v4l_value;
v4l_value = (int)(0xFFFF * value);
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
capture->imageProperties.brightness = v4l_value;
break;
case CV_CAP_PROP_CONTRAST:
capture->imageProperties.contrast = v4l_value;
break;
case CV_CAP_PROP_SATURATION:
capture->imageProperties.colour = v4l_value;
break;
case CV_CAP_PROP_HUE:
capture->imageProperties.hue = v4l_value;
break;
case CV_CAP_PROP_GAIN:
fprintf(stderr, "VIDEOIO ERROR: V4L: Gain control in V4L is not supported\n");
return -1;
case CV_CAP_PROP_EXPOSURE:
fprintf(stderr, "VIDEOIO ERROR: V4L: Exposure control in V4L is not supported\n");
return -1;
default:
fprintf(stderr, "VIDEOIO ERROR: V4L: property #%d is not supported\n", property_id);
return -1;
}
if (v4l1_ioctl(capture->deviceHandle, VIDIOCSPICT, &capture->imageProperties) < 0){
fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to set video informations\n");
icvCloseCAM_V4L(capture);
return -1;
}
}
return 0;
}
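/* Width and height are latched in static variables and applied together through
icvSetVideoSize() once both have been set; MODE renegotiates the format, FPS goes
through VIDIOC_S_PARM, and everything else is treated as a control. */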
static int icvSetPropertyCAM_V4L(CvCaptureCAM_V4L* capture, int property_id, double value){
static int width = 0, height = 0;
int retval;
retval = 0;
switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH:
width = cvRound(value);
capture->width = width;
if(width !=0 && height != 0) {
retval = icvSetVideoSize( capture, width, height);
width = height = 0;
}
break;
case CV_CAP_PROP_FRAME_HEIGHT:
height = cvRound(value);
capture->height = height;
if(width !=0 && height != 0) {
retval = icvSetVideoSize( capture, width, height);
width = height = 0;
}
break;
case CV_CAP_PROP_MODE:
int mode;
mode = cvRound(value);
if (capture->mode != mode) {
switch (mode) {
case CV_CAP_MODE_BGR:
case CV_CAP_MODE_RGB:
case CV_CAP_MODE_GRAY:
case CV_CAP_MODE_YUYV:
capture->mode = mode;
retval = icvSetVideoSize(capture, capture->width, capture->height);
break;
default:
fprintf(stderr, "VIDEOIO ERROR: V4L/V4L2: Unsupported mode: %d\n", mode);
retval=0;
break;
}
}
break;
case CV_CAP_PROP_FPS:
struct v4l2_streamparm setfps;
memset (&setfps, 0, sizeof(struct v4l2_streamparm));
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.parm.capture.timeperframe.numerator = 1;
setfps.parm.capture.timeperframe.denominator = cvRound(value);
if (xioctl (capture->deviceHandle, VIDIOC_S_PARM, &setfps) < 0){
fprintf(stderr, "VIDEOIO ERROR: V4L: Unable to set camera FPS\n");
retval=0;
}
break;
default:
retval = icvSetControl(capture, property_id, value);
}
return retval;
}
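/* Release everything the capture owns: the V4L1 mmap and handle or the V4L2 stream,
buffers and handle, plus the frame data, the temp buffer and the copied device name. */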
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){
if (capture) {
v4l2_free_ranges(capture);
if (capture->is_v4l2_device == 0) {
if (capture->mmaps) {
free(capture->mmaps);
capture->mmaps = NULL;
}
if (capture->memoryMap) {
v4l1_munmap(capture->memoryMap, capture->memoryBuffer.size);
capture->memoryMap = NULL;
}
if (capture->deviceHandle != -1) {
v4l1_close(capture->deviceHandle);
capture->deviceHandle = -1;
}
} else {
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(capture->deviceHandle, VIDIOC_STREAMOFF, &capture->type) < 0) {
perror ("Unable to stop the stream.");
}
for (unsigned int n_buffers2 = 0; n_buffers2 < capture->req.count; ++n_buffers2) {
if (-1 == v4l2_munmap (capture->buffers[n_buffers2].start, capture->buffers[n_buffers2].length)) {
perror ("munmap");
}
}
if (capture->deviceHandle != -1) {
v4l2_close(capture->deviceHandle);
capture->deviceHandle = -1;
}
}
if (capture->frame.imageData)
cvFree(&capture->frame.imageData);
#ifdef USE_TEMP_BUFFER
if (capture->buffers[MAX_V4L_BUFFERS].start) {
free(capture->buffers[MAX_V4L_BUFFERS].start);
capture->buffers[MAX_V4L_BUFFERS].start = NULL;
}
#endif
free(capture->deviceName);
capture->deviceName = NULL;
}
};
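/* Thin CvCapture wrapper that forwards the virtual interface to the icv*CAM_V4L
functions above. */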
class CvCaptureCAM_V4L_CPP : public CvCapture
{
public:
CvCaptureCAM_V4L_CPP() { captureV4L = 0; }
virtual ~CvCaptureCAM_V4L_CPP() { close(); }
virtual bool open( int index );
virtual void close();
virtual double getProperty(int) const;
virtual bool setProperty(int, double);
virtual bool grabFrame();
virtual IplImage* retrieveFrame(int);
protected:
CvCaptureCAM_V4L* captureV4L;
};
bool CvCaptureCAM_V4L_CPP::open( int index )
{
close();
captureV4L = icvCaptureFromCAM_V4L(index);
return captureV4L != 0;
}
void CvCaptureCAM_V4L_CPP::close()
{
if( captureV4L )
{
icvCloseCAM_V4L( captureV4L );
cvFree( &captureV4L );
}
}
bool CvCaptureCAM_V4L_CPP::grabFrame()
{
return captureV4L ? icvGrabFrameCAM_V4L( captureV4L ) != 0 : false;
}
IplImage* CvCaptureCAM_V4L_CPP::retrieveFrame(int)
{
return captureV4L ? icvRetrieveFrameCAM_V4L( captureV4L, 0 ) : 0;
}
double CvCaptureCAM_V4L_CPP::getProperty( int propId ) const
{
return captureV4L ? icvGetPropertyCAM_V4L( captureV4L, propId ) : 0.0;
}
bool CvCaptureCAM_V4L_CPP::setProperty( int propId, double value )
{
return captureV4L ? icvSetPropertyCAM_V4L( captureV4L, propId, value ) != 0 : false;
}
CvCapture* cvCreateCameraCapture_V4L( int index )
{
CvCaptureCAM_V4L_CPP* capture = new CvCaptureCAM_V4L_CPP;
if( capture->open( index ))
return (CvCapture*)capture;
delete capture;
return 0;
}
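/* Illustrative usage sketch (comment only): applications normally reach this backend
through the generic cvCreateCameraCapture() dispatcher, but the factory above can
be exercised directly through the OpenCV C API:

  CvCapture* cap = cvCreateCameraCapture_V4L(0);      // open /dev/video0
  if (cap) {
      cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, 640);
      cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, 480);
      if (cvGrabFrame(cap)) {
          IplImage* frame = cvRetrieveFrame(cap, 0);   // owned by the capture,
          // ... use frame; it stays valid only until the next grab ...
      }
      cvReleaseCapture(&cap);
  }
*/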
#endif