This source file includes following definitions.
- icvInitCapture_V4L
- try_palette
- try_palette_v4l2
- try_init_v4l
- try_init_v4l2
- autosetup_capture_mode_v4l2
- autosetup_capture_mode_v4l
- v4l2_scan_controls
- _capture_V4L2
- _capture_V4L
- icvCaptureFromCAM_V4L
- read_frame_v4l2
- mainloop_v4l2
- icvGrabFrameCAM_V4L
- move_411_block
- yuv420p_to_rgb24
- yuv420_to_rgb24
- yuv411p_to_rgb24
- yuyv_to_rgb24
- uyvy_to_rgb24
- mjpeg_to_rgb24
- bayer2rgb24
- sgbrg2rgb24
- rgb24_to_rgb24
- sonix_decompress_init
- sonix_decompress
- icvRetrieveFrameCAM_V4L
- icvGetPropertyCAM_V4L
- icvSetVideoSize
- icvSetControl
- icvSetPropertyCAM_V4L
- icvCloseCAM_V4L
- open
- close
- grabFrame
- retrieveFrame
- getProperty
- setProperty
- cvCreateCameraCapture_V4L
#include "precomp.hpp"
#if !defined WIN32 && (defined HAVE_CAMV4L || defined HAVE_CAMV4L2 || defined HAVE_VIDEOIO)
#define CLEAR(x) memset (&(x), 0, sizeof (x))
#include <stdio.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/mman.h>
#ifdef HAVE_CAMV4L
#include <linux/videodev.h>
#endif
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#ifdef HAVE_CAMV4L2
#include <asm/types.h>
#include <linux/videodev2.h>
#endif
#ifdef HAVE_VIDEOIO
#include <sys/videoio.h>
#define HAVE_CAMV4L2
#endif
#define DEFAULT_V4L_WIDTH 640
#define DEFAULT_V4L_HEIGHT 480
#define CHANNEL_NUMBER 1
#define MAX_CAMERAS 8
#define MAX_V4L_BUFFERS 10
#define DEFAULT_V4L_BUFFERS 4
#define V4L_ABORT_BADJPEG
#define MAX_DEVICE_DRIVER_NAME 80
#ifdef HAVE_CAMV4L2
/* One driver buffer obtained via VIDIOC_REQBUFS and mapped with mmap(). */
struct buffer
{
void * start;
size_t length;
};
/* Number of mmap'ed buffers actually granted by the driver (see VIDIOC_REQBUFS loop). */
static unsigned int n_buffers = 0;
/* Fallback fourcc definitions for kernel headers that predate these formats. */
#ifndef V4L2_PIX_FMT_SBGGR8
#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B','A','8','1')
#endif
#ifndef V4L2_PIX_FMT_SN9C10X
#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S','9','1','0')
#endif
#ifndef V4L2_PIX_FMT_SGBRG
#define V4L2_PIX_FMT_SGBRG v4l2_fourcc('G','B','R','G')
#endif
#endif
/* Pixel format negotiated with the V4L2 driver by
   autosetup_capture_mode_v4l2(); stored in CvCaptureCAM_V4L::palette and
   used to pick the matching BGR conversion routine at retrieve time. */
enum PALETTE_TYPE {
PALETTE_BGR24 = 1,
PALETTE_YVU420,
PALETTE_YUV411P,
PALETTE_YUYV,
PALETTE_UYVY,
PALETTE_SBGGR8,
PALETTE_SN9C10X,
PALETTE_MJPEG,
PALETTE_SGBRG,
PALETTE_RGB24
};
/* Per-device capture state shared by the V4L1 and V4L2 code paths. */
typedef struct CvCaptureCAM_V4L
{
int deviceHandle; /* fd of the open /dev/videoN node */
int bufferIndex; /* index of the buffer holding the newest frame */
int FirstCapture; /* 1 until the first grab primes the buffer queue */
#ifdef HAVE_CAMV4L
/* --- legacy V4L1 state --- */
struct video_capability capability;
struct video_window captureWindow;
struct video_picture imageProperties;
struct video_mbuf memoryBuffer;
struct video_mmap *mmaps;
#endif
char *memoryMap; /* V4L1: mmap'ed frame area (memoryBuffer.size bytes) */
IplImage frame; /* BGR24 frame handed back to the caller */
#ifdef HAVE_CAMV4L2
/* --- V4L2 state --- */
enum PALETTE_TYPE palette; /* format negotiated by autosetup_capture_mode_v4l2 */
buffer buffers[MAX_V4L_BUFFERS + 1]; /* last slot: private copy of the newest frame */
struct v4l2_capability cap;
struct v4l2_input inp;
struct v4l2_format form;
struct v4l2_crop crop;
struct v4l2_cropcap cropcap;
struct v4l2_requestbuffers req;
struct v4l2_control control;
enum v4l2_buf_type type;
struct v4l2_queryctrl queryctrl;
struct timeval timestamp; /* driver timestamp of the last dequeued frame */
/* Presence flag (0/1) and min/max range for each control discovered by
   v4l2_scan_controls(). */
int v4l2_brightness, v4l2_brightness_min, v4l2_brightness_max;
int v4l2_contrast, v4l2_contrast_min, v4l2_contrast_max;
int v4l2_saturation, v4l2_saturation_min, v4l2_saturation_max;
int v4l2_hue, v4l2_hue_min, v4l2_hue_max;
int v4l2_gain, v4l2_gain_min, v4l2_gain_max;
int v4l2_exposure, v4l2_exposure_min, v4l2_exposure_max;
#endif
}
CvCaptureCAM_V4L;
#ifdef HAVE_CAMV4L2
/* 1 while the current device is driven through the V4L2 API, 0 when the
   V4L1 fallback is in use (set in icvCaptureFromCAM_V4L / _capture_V4L2). */
int V4L2_SUPPORT = 0;
#endif
/* Forward declarations of the capture entry points defined below. */
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture );
static int icvGrabFrameCAM_V4L( CvCaptureCAM_V4L* capture );
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int );
static double icvGetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id );
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture, int property_id, double value );
static int icvSetVideoSize( CvCaptureCAM_V4L* capture, int w, int h);
/* Count of /dev/videoN nodes found and a bitmask of their indices
   (both filled once by icvInitCapture_V4L). */
static int numCameras = 0;
static int indexList = 0;
/* Enumerate /dev/video0 .. /dev/video{MAX_CAMERAS-1}: every node that can
   be opened is recorded in the indexList bitmask and counted in numCameras.
   The probe handle is closed immediately; no device state is kept. */
static void icvInitCapture_V4L() {
   int deviceHandle;
   int CameraNumber;
   char deviceName[MAX_DEVICE_DRIVER_NAME];

   CameraNumber = 0;
   while (CameraNumber < MAX_CAMERAS) {
      /* Bounded write (was sprintf); %1d always fits for CameraNumber < 8. */
      snprintf(deviceName, sizeof(deviceName), "/dev/video%1d", CameraNumber);
      deviceHandle = open(deviceName, O_RDONLY);
      if (deviceHandle != -1) {
         /* The device exists: remember it and release the probe handle.
            (Merged the two identical `deviceHandle != -1` tests.) */
         indexList |= (1 << CameraNumber);
         numCameras++;
         close(deviceHandle);
      }
      CameraNumber++;
   }
}
#ifdef HAVE_CAMV4L
/* Ask a V4L1 driver to switch to palette `pal` at `depth` bits per pixel.
   Sets the fields, writes them with VIDIOCSPICT and reads them back with
   VIDIOCGPICT, because drivers may silently substitute another palette.
   Returns 1 when the driver now reports exactly the requested palette,
   0 on any ioctl failure or substitution. */
static int
try_palette(int fd,
struct video_picture *cam_pic,
int pal,
int depth)
{
    cam_pic->palette = pal;
    cam_pic->depth = depth;

    if (ioctl(fd, VIDIOCSPICT, cam_pic) >= 0 &&
        ioctl(fd, VIDIOCGPICT, cam_pic) >= 0 &&
        cam_pic->palette == pal)
    {
        return 1;
    }
    return 0;
}
#endif
#ifdef HAVE_CAMV4L2
/* Propose `colorspace` (a V4L2 fourcc) to the driver at the default
   frame size.  Returns 0 when VIDIOC_S_FMT succeeds AND the driver kept
   our pixel format, -1 otherwise (failure or silent substitution). */
static int try_palette_v4l2(CvCaptureCAM_V4L* capture, unsigned long colorspace)
{
    CLEAR (capture->form);

    capture->form.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    capture->form.fmt.pix.pixelformat = colorspace;
    capture->form.fmt.pix.field       = V4L2_FIELD_ANY;
    capture->form.fmt.pix.width       = DEFAULT_V4L_WIDTH;
    capture->form.fmt.pix.height      = DEFAULT_V4L_HEIGHT;

    if (ioctl(capture->deviceHandle, VIDIOC_S_FMT, &capture->form) == -1)
        return -1;

    /* Drivers are allowed to adjust the format; verify it stuck. */
    return (capture->form.fmt.pix.pixelformat == colorspace) ? 0 : -1;
}
#endif
#ifdef HAVE_CAMV4L
/* Probe `deviceName` through the legacy V4L1 API.
   Returns -1 when the node cannot be opened, 0 when it opens but does not
   answer VIDIOCGCAP (not a V4L1 device), 1 on success (handle open and
   capture->capability filled in). */
static int try_init_v4l(CvCaptureCAM_V4L* capture, char *deviceName)
{
   int detect = 0;

   capture->deviceHandle = open(deviceName, O_RDWR);
   /* FIX: open() returns -1 on failure (0 is a valid descriptor); the old
      `== 0` test could never fire, so open failures fell through and were
      misreported as "unable to query number of channels". */
   if (capture->deviceHandle < 0)
   {
      detect = -1;
      icvCloseCAM_V4L(capture);
   }

   if (detect == 0)
   {
      /* Query V4L1 capabilities; failure means this is not a V4L1 device. */
      if (ioctl(capture->deviceHandle, VIDIOCGCAP, &capture->capability) < 0)
      {
         detect = 0;
         icvCloseCAM_V4L(capture);
      }
      else
      {
         detect = 1;
      }
   }
   return detect;
}
#endif
#ifdef HAVE_CAMV4L2
/* Probe `deviceName` through the V4L2 API.
   Returns 1 on success (non-blocking handle open; capture->cap and
   capture->inp filled from VIDIOC_QUERYCAP / VIDIOC_ENUMINPUT),
   0 when the device does not answer the V4L2 ioctls, and
   -1 when the node cannot be opened at all. */
static int try_init_v4l2(CvCaptureCAM_V4L* capture, char *deviceName)
{
int deviceIndex;
/* O_NONBLOCK so the later select/DQBUF loop can poll without blocking. */
capture->deviceHandle = open (deviceName, O_RDWR | O_NONBLOCK, 0);
if (-1 == capture->deviceHandle)
{
#ifndef NDEBUG
fprintf(stderr, "(DEBUG) try_init_v4l2 open \"%s\": %s\n", deviceName, strerror(errno));
#endif
icvCloseCAM_V4L(capture);
return -1;
}
CLEAR (capture->cap);
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYCAP, &capture->cap))
{
#ifndef NDEBUG
fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_QUERYCAP \"%s\": %s\n", deviceName, strerror(errno));
#endif
icvCloseCAM_V4L(capture);
return 0;
}
/* Ask which input is currently selected... */
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_INPUT, &deviceIndex))
{
#ifndef NDEBUG
fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_G_INPUT \"%s\": %s\n", deviceName, strerror(errno))
;
#endif
icvCloseCAM_V4L(capture);
return 0;
}
/* ...and fetch its description into capture->inp. */
CLEAR (capture->inp);
capture->inp.index = deviceIndex;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp))
{
#ifndef NDEBUG
fprintf(stderr, "(DEBUG) try_init_v4l2 VIDIOC_ENUMINPUT \"%s\": %s\n", deviceName, strerror(errno));
#endif
icvCloseCAM_V4L(capture);
return 0;
}
return 1;
}
/* Try V4L2 pixel formats in decreasing order of conversion convenience
   and record the first one the driver accepts in capture->palette.
   Returns 0 on success, -1 (after closing the device) when no supported
   format was accepted. */
static int autosetup_capture_mode_v4l2(CvCaptureCAM_V4L* capture)
{
/* BGR24 needs no conversion at all. */
if (try_palette_v4l2(capture, V4L2_PIX_FMT_BGR24) == 0)
{
capture->palette = PALETTE_BGR24;
}
else
if (try_palette_v4l2(capture, V4L2_PIX_FMT_YVU420) == 0)
{
capture->palette = PALETTE_YVU420;
}
else
if (try_palette_v4l2(capture, V4L2_PIX_FMT_YUV411P) == 0)
{
capture->palette = PALETTE_YUV411P;
}
else
#ifdef HAVE_JPEG
/* (M)JPEG is decoded via imdecode, hence only available with HAVE_JPEG. */
if (try_palette_v4l2(capture, V4L2_PIX_FMT_MJPEG) == 0 ||
try_palette_v4l2(capture, V4L2_PIX_FMT_JPEG) == 0)
{
capture->palette = PALETTE_MJPEG;
}
else
#endif
if (try_palette_v4l2(capture, V4L2_PIX_FMT_YUYV) == 0)
{
capture->palette = PALETTE_YUYV;
}
else if (try_palette_v4l2(capture, V4L2_PIX_FMT_UYVY) == 0)
{
capture->palette = PALETTE_UYVY;
}
else
/* Raw sensor formats (software demosaic / decompression below). */
if (try_palette_v4l2(capture, V4L2_PIX_FMT_SN9C10X) == 0)
{
capture->palette = PALETTE_SN9C10X;
} else
if (try_palette_v4l2(capture, V4L2_PIX_FMT_SBGGR8) == 0)
{
capture->palette = PALETTE_SBGGR8;
} else
if (try_palette_v4l2(capture, V4L2_PIX_FMT_SGBRG) == 0)
{
capture->palette = PALETTE_SGBRG;
}
else if (try_palette_v4l2(capture, V4L2_PIX_FMT_RGB24) == 0)
{
capture->palette = PALETTE_RGB24;
}
else
{
fprintf(stderr, "VIDEOIO ERROR: V4L2: Pixel format of incoming image is unsupported by OpenCV\n");
icvCloseCAM_V4L(capture);
return -1;
}
return 0;
}
#endif
#ifdef HAVE_CAMV4L
/* Negotiate a V4L1 palette OpenCV can convert (RGB24 preferred, then the
   YUV variants).  The accepted palette is left in
   capture->imageProperties by try_palette().
   Returns 0 on success, -1 (after closing the device) on failure. */
static int autosetup_capture_mode_v4l(CvCaptureCAM_V4L* capture)
{
if(ioctl(capture->deviceHandle, VIDIOCGPICT, &capture->imageProperties) < 0) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Unable to determine size of incoming image\n");
icvCloseCAM_V4L(capture);
return -1;
}
/* Bodies are intentionally empty: try_palette() already stored the
   accepted palette in imageProperties. */
if (try_palette(capture->deviceHandle, &capture->imageProperties, VIDEO_PALETTE_RGB24, 24)) {
}
else if (try_palette(capture->deviceHandle, &capture->imageProperties, VIDEO_PALETTE_YUV420P, 16)) {
}
else if (try_palette(capture->deviceHandle, &capture->imageProperties, VIDEO_PALETTE_YUV420, 16)) {
}
else if (try_palette(capture->deviceHandle, &capture->imageProperties, VIDEO_PALETTE_YUV411P, 16)) {
}
else {
fprintf(stderr, "VIDEOIO ERROR: V4L: Pixel format of incoming image is unsupported by OpenCV\n");
icvCloseCAM_V4L(capture);
return -1;
}
return 0;
}
#endif
#ifdef HAVE_CAMV4L2
static void v4l2_scan_controls(CvCaptureCAM_V4L* capture)
{
__u32 ctrl_id;
for (ctrl_id = V4L2_CID_BASE;
ctrl_id < V4L2_CID_LASTP1;
ctrl_id++)
{
CLEAR (capture->queryctrl);
capture->queryctrl.id = ctrl_id;
if (0 == ioctl (capture->deviceHandle, VIDIOC_QUERYCTRL,
&capture->queryctrl))
{
if (capture->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
continue;
if (capture->queryctrl.id == V4L2_CID_BRIGHTNESS)
{
capture->v4l2_brightness = 1;
capture->v4l2_brightness_min = capture->queryctrl.minimum;
capture->v4l2_brightness_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_CONTRAST)
{
capture->v4l2_contrast = 1;
capture->v4l2_contrast_min = capture->queryctrl.minimum;
capture->v4l2_contrast_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_SATURATION)
{
capture->v4l2_saturation = 1;
capture->v4l2_saturation_min = capture->queryctrl.minimum;
capture->v4l2_saturation_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_HUE)
{
capture->v4l2_hue = 1;
capture->v4l2_hue_min = capture->queryctrl.minimum;
capture->v4l2_hue_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_GAIN)
{
capture->v4l2_gain = 1;
capture->v4l2_gain_min = capture->queryctrl.minimum;
capture->v4l2_gain_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_EXPOSURE)
{
capture->v4l2_exposure = 1;
capture->v4l2_exposure_min = capture->queryctrl.minimum;
capture->v4l2_exposure_max = capture->queryctrl.maximum;
}
} else {
if (errno == EINVAL)
continue;
perror ("VIDIOC_QUERYCTRL");
}
}
for (ctrl_id = V4L2_CID_PRIVATE_BASE;;ctrl_id++)
{
CLEAR (capture->queryctrl);
capture->queryctrl.id = ctrl_id;
if (0 == ioctl (capture->deviceHandle, VIDIOC_QUERYCTRL,
&capture->queryctrl))
{
if (capture->queryctrl.flags & V4L2_CTRL_FLAG_DISABLED)
continue;
if (capture->queryctrl.id == V4L2_CID_BRIGHTNESS)
{
capture->v4l2_brightness = 1;
capture->v4l2_brightness_min = capture->queryctrl.minimum;
capture->v4l2_brightness_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_CONTRAST)
{
capture->v4l2_contrast = 1;
capture->v4l2_contrast_min = capture->queryctrl.minimum;
capture->v4l2_contrast_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_SATURATION)
{
capture->v4l2_saturation = 1;
capture->v4l2_saturation_min = capture->queryctrl.minimum;
capture->v4l2_saturation_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_HUE)
{
capture->v4l2_hue = 1;
capture->v4l2_hue_min = capture->queryctrl.minimum;
capture->v4l2_hue_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_GAIN)
{
capture->v4l2_gain = 1;
capture->v4l2_gain_min = capture->queryctrl.minimum;
capture->v4l2_gain_max = capture->queryctrl.maximum;
}
if (capture->queryctrl.id == V4L2_CID_EXPOSURE)
{
capture->v4l2_exposure = 1;
capture->v4l2_exposure_min = capture->queryctrl.minimum;
capture->v4l2_exposure_max = capture->queryctrl.maximum;
}
} else {
if (errno == EINVAL)
break;
perror ("VIDIOC_QUERYCTRL");
}
}
}
/* Initialize `capture` through the V4L2 API: probe the device, reset the
   cached control state, check capture capability, negotiate pixel format
   and frame size, request and mmap the driver's buffers, and allocate the
   output BGR frame.  Returns 1 on success, -1 on failure (device closed). */
static int _capture_V4L2 (CvCaptureCAM_V4L *capture, char *deviceName)
{
int detect_v4l2 = 0;
detect_v4l2 = try_init_v4l2(capture, deviceName);
if (detect_v4l2 != 1) {
/* Not a usable V4L2 device. */
return -1;
}
V4L2_SUPPORT = 1;
/* Reset cached control presence/ranges; filled by v4l2_scan_controls(). */
capture->v4l2_brightness = 0;
capture->v4l2_contrast = 0;
capture->v4l2_saturation = 0;
capture->v4l2_hue = 0;
capture->v4l2_gain = 0;
capture->v4l2_exposure = 0;
capture->v4l2_brightness_min = 0;
capture->v4l2_contrast_min = 0;
capture->v4l2_saturation_min = 0;
capture->v4l2_hue_min = 0;
capture->v4l2_gain_min = 0;
capture->v4l2_exposure_min = 0;
capture->v4l2_brightness_max = 0;
capture->v4l2_contrast_max = 0;
capture->v4l2_saturation_max = 0;
capture->v4l2_hue_max = 0;
capture->v4l2_gain_max = 0;
capture->v4l2_exposure_max = 0;
capture->timestamp.tv_sec = 0;
capture->timestamp.tv_usec = 0;
v4l2_scan_controls(capture);
if ((capture->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: device %s is unable to capture video memory.\n",deviceName);
icvCloseCAM_V4L(capture);
return -1;
}
/* If a non-default input was selected, re-enumerate the configured one. */
if(capture->inp.index > 0) {
CLEAR (capture->inp);
capture->inp.index = CHANNEL_NUMBER;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_ENUMINPUT, &capture->inp))
{
fprintf (stderr, "VIDEOIO ERROR: V4L2: Aren't able to set channel number\n");
icvCloseCAM_V4L (capture);
return -1;
}
}
/* Fetch the current format before negotiating ours. */
CLEAR (capture->form);
capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) {
fprintf( stderr, "VIDEOIO ERROR: V4L2: Could not obtain specifics of capture window.\n\n");
icvCloseCAM_V4L(capture);
return -1;
}
/* NOTE(review): dead branch - V4L2_SUPPORT was unconditionally set to 1
   above, so this can never trigger. */
if (V4L2_SUPPORT == 0)
{
}
if (autosetup_capture_mode_v4l2(capture) == -1)
return -1;
icvSetVideoSize(capture, DEFAULT_V4L_WIDTH, DEFAULT_V4L_HEIGHT);
/* Defend against buggy drivers: enforce minimum stride and image size. */
unsigned int min;
min = capture->form.fmt.pix.width * 2;
if (capture->form.fmt.pix.bytesperline < min)
capture->form.fmt.pix.bytesperline = min;
min = capture->form.fmt.pix.bytesperline * capture->form.fmt.pix.height;
if (capture->form.fmt.pix.sizeimage < min)
capture->form.fmt.pix.sizeimage = min;
CLEAR (capture->req);
unsigned int buffer_number = DEFAULT_V4L_BUFFERS;
/* Request mmap buffers; on shortage retry with one buffer fewer until 1. */
try_again:
capture->req.count = buffer_number;
capture->req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
capture->req.memory = V4L2_MEMORY_MMAP;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_REQBUFS, &capture->req))
{
if (EINVAL == errno)
{
fprintf (stderr, "%s does not support memory mapping\n", deviceName);
} else {
perror ("VIDIOC_REQBUFS");
}
icvCloseCAM_V4L (capture);
return -1;
}
if (capture->req.count < buffer_number)
{
if (buffer_number == 1)
{
fprintf (stderr, "Insufficient buffer memory on %s\n", deviceName);
icvCloseCAM_V4L (capture);
return -1;
} else {
buffer_number--;
fprintf (stderr, "Insufficient buffer memory on %s -- decreaseing buffers\n", deviceName);
goto try_again;
}
}
/* Map each granted buffer into our address space. */
for (n_buffers = 0; n_buffers < capture->req.count; ++n_buffers)
{
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QUERYBUF, &buf)) {
perror ("VIDIOC_QUERYBUF");
icvCloseCAM_V4L (capture);
return -1;
}
capture->buffers[n_buffers].length = buf.length;
capture->buffers[n_buffers].start =
mmap (NULL ,
buf.length,
PROT_READ | PROT_WRITE ,
MAP_SHARED ,
capture->deviceHandle, buf.m.offset);
if (MAP_FAILED == capture->buffers[n_buffers].start) {
perror ("mmap");
icvCloseCAM_V4L (capture);
return -1;
}
/* Extra slot: private copy of the newest frame so driver buffers can
   be re-queued immediately (see read_frame_v4l2). */
if (n_buffers == 0) {
capture->buffers[MAX_V4L_BUFFERS].start = malloc( buf.length );
capture->buffers[MAX_V4L_BUFFERS].length = buf.length;
}
}
/* Header for the 3-channel frame returned to the caller. */
cvInitImageHeader( &capture->frame,
cvSize( capture->form.fmt.pix.width,
capture->form.fmt.pix.height ),
IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 );
capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
return 1;
};
#endif
#ifdef HAVE_CAMV4L
/* Initialize `capture` through the legacy V4L1 API: probe the device,
   check capture capability, select the configured channel, negotiate a
   palette, mmap the driver's frame area, and allocate per-frame capture
   descriptors plus the output BGR frame.
   Returns 1 on success, -1 on failure (device closed). */
static int _capture_V4L (CvCaptureCAM_V4L *capture, char *deviceName)
{
   int detect_v4l = 0;

   detect_v4l = try_init_v4l(capture, deviceName);

   if (detect_v4l == -1)
   {
      fprintf (stderr, "VIDEOIO ERROR: V4L"
               ": device %s: Unable to open for READ ONLY\n", deviceName);
      return -1;
   }

   if (detect_v4l <= 0)
   {
      fprintf (stderr, "VIDEOIO ERROR: V4L"
               ": device %s: Unable to query number of channels\n", deviceName);
      return -1;
   }

   /* The device must support capture to memory. */
   if ((capture->capability.type & VID_TYPE_CAPTURE) == 0) {
      fprintf( stderr, "VIDEOIO ERROR: V4L: "
               "device %s is unable to capture video memory.\n",deviceName);
      icvCloseCAM_V4L(capture);
      return -1;
   }

   /* Select the configured input channel; best effort, errors ignored so
      cards with a single fixed input keep working. */
   if (capture->capability.channels > 0) {
      struct video_channel selectedChannel;
      memset(&selectedChannel, 0, sizeof(selectedChannel));
      selectedChannel.channel = CHANNEL_NUMBER;
      if (ioctl(capture->deviceHandle, VIDIOCGCHAN , &selectedChannel) != -1) {
         if (ioctl(capture->deviceHandle, VIDIOCSCHAN , &selectedChannel) == -1) {
            /* Could not set the channel - keep the driver default. */
         }
      }
   }

   /* The current capture window supplies the frame geometry. */
   if (ioctl(capture->deviceHandle, VIDIOCGWIN, &capture->captureWindow) == -1) {
      fprintf( stderr, "VIDEOIO ERROR: V4L: "
               "Could not obtain specifics of capture window.\n\n");
      icvCloseCAM_V4L(capture);
      return -1;
   }

   /* Negotiate a palette OpenCV can convert. */
   if (autosetup_capture_mode_v4l(capture) == -1)
      return -1;

   /* Map the driver's frame area.
      NOTE(review): the VIDIOCGMBUF result is unchecked, as in the original
      code - a failure would leave memoryBuffer.size undefined. */
   ioctl(capture->deviceHandle, VIDIOCGMBUF, &capture->memoryBuffer);
   capture->memoryMap = (char *)mmap(0,
                                     capture->memoryBuffer.size,
                                     PROT_READ | PROT_WRITE,
                                     MAP_SHARED,
                                     capture->deviceHandle,
                                     0);
   if (capture->memoryMap == MAP_FAILED) {
      fprintf( stderr, "VIDEOIO ERROR: V4L: Mapping Memmory from video source error: %s\n", strerror(errno));
      icvCloseCAM_V4L(capture);
      /* FIX: previously fell through here and kept using the MAP_FAILED
         pointer; abort initialization instead. */
      return -1;
   }

   /* Per-frame capture descriptors used by VIDIOCMCAPTURE / VIDIOCSYNC. */
   capture->mmaps = (struct video_mmap *)
      (malloc(capture->memoryBuffer.frames * sizeof(struct video_mmap)));
   if (!capture->mmaps) {
      fprintf( stderr, "VIDEOIO ERROR: V4L: Could not memory map video frames.\n");
      icvCloseCAM_V4L(capture);
      return -1;
   }

   /* Header for the 3-channel frame returned to the caller. */
   cvInitImageHeader( &capture->frame,
                      cvSize( capture->captureWindow.width,
                              capture->captureWindow.height ),
                      IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 );
   capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);

   return 1;
}
#endif
/* Allocate and initialize a capture context for camera `index`
   (index < 0 selects the first available camera).  V4L2 is tried first;
   on failure the code falls back to V4L1.  Returns NULL on any failure. */
static CvCaptureCAM_V4L * icvCaptureFromCAM_V4L (int index)
{
   static int autoindex;
   autoindex = 0;

   char deviceName[MAX_DEVICE_DRIVER_NAME];

   /* Enumerate /dev/video* once. */
   if (!numCameras)
      icvInitCapture_V4L();
   if (!numCameras)
      return NULL;

   /* An explicit index must refer to a node found during enumeration. */
   if ( (index > -1) && ! ((1 << index) & indexList) )
   {
      fprintf( stderr, "VIDEOIO ERROR: V4L: index %d is not correct!\n",index);
      return NULL;
   }

   CvCaptureCAM_V4L * capture = (CvCaptureCAM_V4L*)cvAlloc(sizeof(CvCaptureCAM_V4L));
   if (!capture) {
      fprintf( stderr, "VIDEOIO ERROR: V4L: Could not allocate memory for capture process.\n");
      return NULL;
   }

   /* index < 0: pick the first camera present in the bitmask. */
   if (index < 0) {
      for (; autoindex < MAX_CAMERAS; autoindex++)
         if (indexList & (1 << autoindex))
            break;
      if (autoindex == MAX_CAMERAS) {
         cvFree(&capture);   /* FIX: `capture` was leaked on this path */
         return NULL;
      }
      index = autoindex;
      autoindex++;
   }

   /* Bounded write (was sprintf); %1d always fits for index < MAX_CAMERAS. */
   snprintf(deviceName, sizeof(deviceName), "/dev/video%1d", index);

   memset(capture, 0, sizeof(CvCaptureCAM_V4L));
   capture->FirstCapture = 1;

#ifdef HAVE_CAMV4L2
   if (_capture_V4L2 (capture, deviceName) == -1) {
      icvCloseCAM_V4L(capture);
      V4L2_SUPPORT = 0;
#endif
#ifdef HAVE_CAMV4L
      /* V4L2 failed (or is compiled out): try the legacy V4L1 path.
         NOTE(review): `capture` is not freed on this failure path, matching
         the original code - confirm icvCloseCAM_V4L / cvReleaseCapture
         ownership conventions before adding a cvFree here. */
      if (_capture_V4L (capture, deviceName) == -1) {
         icvCloseCAM_V4L(capture);
         return NULL;
      }
#endif
#ifdef HAVE_CAMV4L2
   } else {
      V4L2_SUPPORT = 1;
   }
#endif

   return capture;
}
#ifdef HAVE_CAMV4L2
/* Dequeue one filled driver buffer, copy its contents into the spare
   slot (index MAX_V4L_BUFFERS), and immediately re-queue the buffer so
   the driver can refill it.  Sets capture->bufferIndex/timestamp.
   Returns 1 when a frame was taken (or on an unexpected DQBUF error),
   0 when the caller should retry (EAGAIN, or EIO recovery). */
static int read_frame_v4l2(CvCaptureCAM_V4L* capture) {
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_DQBUF, &buf)) {
switch (errno) {
case EAGAIN:
/* Non-blocking fd: no frame ready yet. */
return 0;
case EIO:
/* Transient stream error: re-queue the buffer if it is ours. */
if (!(buf.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE)))
{
if (ioctl(capture->deviceHandle, VIDIOC_QBUF, &buf) == -1)
{
return 0;
}
}
return 0;
default:
/* NOTE(review): returns 1 ("done") after an unexpected error, so the
   select loop in mainloop_v4l2 stops waiting - confirm intent before
   changing this to 0. */
perror ("VIDIOC_DQBUF");
return 1;
}
}
assert(buf.index < capture->req.count);
/* Copy out so the mmap'ed buffer can be handed back right away. */
memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
capture->buffers[buf.index].start,
capture->buffers[MAX_V4L_BUFFERS].length );
capture->bufferIndex = MAX_V4L_BUFFERS;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf))
perror ("VIDIOC_QBUF");
/* Remember the driver timestamp of this frame. */
capture->timestamp = buf.timestamp;
return 1;
}
/* Block (select, 10 s timeout) until the device signals a readable frame,
   then consume it via read_frame_v4l2().  The outer while(count--) runs
   exactly once; kept as in the original. */
static void mainloop_v4l2(CvCaptureCAM_V4L* capture) {
unsigned int count;
count = 1;
while (count-- > 0) {
for (;;) {
fd_set fds;
struct timeval tv;
int r;
FD_ZERO (&fds);
FD_SET (capture->deviceHandle, &fds);
/* Wait up to 10 seconds for a frame. */
tv.tv_sec = 10;
tv.tv_usec = 0;
r = select (capture->deviceHandle+1, &fds, NULL, NULL, &tv);
if (-1 == r) {
if (EINTR == errno)
continue;
/* NOTE(review): non-EINTR select errors are only reported; control
   falls through to read_frame_v4l2 anyway - confirm intent. */
perror ("select");
}
if (0 == r) {
fprintf (stderr, "select timeout\n");
/* Give up on this frame. */
break;
}
if (read_frame_v4l2 (capture))
break;
}
}
}
#endif
/* Grab one frame.  On the first call this primes the pipeline (V4L2:
   queue all buffers and STREAMON; V4L1: start capture into all but the
   last frame slot), then captures a frame (V4L2: select+DQBUF loop;
   V4L1: VIDIOCMCAPTURE round-robin).  Returns 1 on success, 0 on a
   fatal V4L2 setup error. */
static int icvGrabFrameCAM_V4L(CvCaptureCAM_V4L* capture) {
if (capture->FirstCapture) {
#ifdef HAVE_CAMV4L2
#ifdef HAVE_CAMV4L
if (V4L2_SUPPORT == 1)
#endif
{
/* Queue every mmap'ed buffer, then start streaming. */
for (capture->bufferIndex = 0;
capture->bufferIndex < ((int)capture->req.count);
++capture->bufferIndex)
{
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = (unsigned long)capture->bufferIndex;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_QBUF, &buf)) {
perror ("VIDIOC_QBUF");
return 0;
}
}
capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl (capture->deviceHandle, VIDIOC_STREAMON,
&capture->type)) {
perror ("VIDIOC_STREAMON");
return 0;
}
}
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
else
#endif
#ifdef HAVE_CAMV4L
{
/* V4L1: kick off capture into all but the last frame slot. */
for (capture->bufferIndex = 0;
capture->bufferIndex < (capture->memoryBuffer.frames-1);
++capture->bufferIndex) {
capture->mmaps[capture->bufferIndex].frame = capture->bufferIndex;
capture->mmaps[capture->bufferIndex].width = capture->captureWindow.width;
capture->mmaps[capture->bufferIndex].height = capture->captureWindow.height;
capture->mmaps[capture->bufferIndex].format = capture->imageProperties.palette;
if (ioctl(capture->deviceHandle, VIDIOCMCAPTURE, &capture->mmaps[capture->bufferIndex]) == -1) {
fprintf( stderr, "VIDEOIO ERROR: V4L: Initial Capture Error: Unable to load initial memory buffers.\n");
return 0;
}
}
}
#endif
#if defined(V4L_ABORT_BADJPEG) && defined(HAVE_CAMV4L2)
if (V4L2_SUPPORT == 1)
{
/* Capture and implicitly discard one frame - presumably to skip a bad
   first (M)JPEG frame (see V4L_ABORT_BADJPEG); the second mainloop_v4l2
   call below overwrites the bounce buffer.  TODO confirm. */
mainloop_v4l2(capture);
}
#endif
capture->FirstCapture = 0;
}
#ifdef HAVE_CAMV4L2
if (V4L2_SUPPORT == 1)
{
mainloop_v4l2(capture);
}
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
else
#endif
#ifdef HAVE_CAMV4L
{
/* V4L1: start capture into the current slot, advance round-robin. */
capture->mmaps[capture->bufferIndex].frame = capture->bufferIndex;
capture->mmaps[capture->bufferIndex].width = capture->captureWindow.width;
capture->mmaps[capture->bufferIndex].height = capture->captureWindow.height;
capture->mmaps[capture->bufferIndex].format = capture->imageProperties.palette;
if (ioctl (capture->deviceHandle, VIDIOCMCAPTURE,
&capture->mmaps[capture->bufferIndex]) == -1) {
/* Capture is already in flight; treat as success. */
return 1;
}
++capture->bufferIndex;
if (capture->bufferIndex == capture->memoryBuffer.frames) {
capture->bufferIndex = 0;
}
}
#endif
return(1);
}
/* Clamp a 16.16 fixed-point value to an 8-bit channel. */
#define LIMIT(x) ((x)>0xffffff?0xff: ((x)<=0xffff?0:((x)>>16)))

/* Convert one 2x2 block of YUV 4:2:0 samples (four Y, one U, one V) into
   four BGR24 pixels: two on the current scan line starting at `rgb`, two
   on the next line (`rowPixels` pixels further on).  Uses 16.16
   fixed-point YUV->RGB coefficients. */
static inline void
move_420_block(int yTL, int yTR, int yBL, int yBR, int u, int v,
               int rowPixels, unsigned char * rgb)
{
    const int rvScale =  91881;
    const int guScale = -22553;
    const int gvScale = -46801;
    const int buScale = 116129;
    const int yScale  =  65536;

    /* Chroma contributions are shared by all four pixels. */
    const int g = guScale * u + gvScale * v;
    const int r = rvScale * v;
    const int b = buScale * u;

    const int y[4] = { yTL * yScale, yTR * yScale, yBL * yScale, yBR * yScale };

    unsigned char *out = rgb;
    int i;
    for (i = 0; i < 4; i++) {
        if (i == 2)
            out = rgb + 3 * rowPixels;   /* drop to the second scan line */
        *out++ = LIMIT(b + y[i]);
        *out++ = LIMIT(g + y[i]);
        *out++ = LIMIT(r + y[i]);
    }
}
/* Convert one 1x4 run of YUV 4:1:1 samples (four Y, one U, one V) into
   four consecutive BGR24 pixels starting at `rgb`.  The unnamed third
   parameter (row pitch) is unused because all four pixels lie on one
   scan line.  Same fixed-point coefficients as move_420_block. */
static inline void
move_411_block(int yTL, int yTR, int yBL, int yBR, int u, int v,
               int , unsigned char * rgb)
{
    const int rvScale =  91881;
    const int guScale = -22553;
    const int gvScale = -46801;
    const int buScale = 116129;
    const int yScale  =  65536;

    /* Chroma contributions shared by all four pixels. */
    const int g = guScale * u + gvScale * v;
    const int r = rvScale * v;
    const int b = buScale * u;

    const int y[4] = { yTL * yScale, yTR * yScale, yBL * yScale, yBR * yScale };

    unsigned char *out = rgb;
    int i;
    for (i = 0; i < 4; i++) {
        *out++ = LIMIT(b + y[i]);
        *out++ = LIMIT(g + y[i]);
        *out++ = LIMIT(r + y[i]);
    }
}
/* Convert a planar YUV 4:2:0 image (full Y plane followed by quarter-size
   U then V planes) into packed BGR24.  Each 2x2 block of Y samples shares
   one U and one V sample; conversion is delegated to move_420_block. */
static void
yuv420p_to_rgb24(int width, int height,
unsigned char *pIn0, unsigned char *pOut0)
{
    const int numpix = width * height;
    const int bytes = 24 >> 3;           /* 3 output bytes per pixel */
    unsigned char *pY = pIn0;
    unsigned char *pU = pY + numpix;
    unsigned char *pV = pU + numpix / 4;
    unsigned char *pOut = pOut0;
    int row, col;

    for (row = 0; row <= height - 2; row += 2) {
        for (col = 0; col <= width - 2; col += 2) {
            int y00 = pY[0];
            int y01 = pY[1];
            int y10 = pY[width];
            int y11 = pY[width + 1];
            int u = *pU++ - 128;
            int v = *pV++ - 128;

            move_420_block(y00, y01, y10, y11, u, v, width, pOut);

            pY += 2;
            pOut += 2 * bytes;
        }
        /* The second line of each block pair was written above: skip it. */
        pY += width;
        pOut += width * bytes;
    }
}
#ifdef HAVE_CAMV4L
/* Convert a V4L1 YUV420 buffer into packed BGR24, two 2x2 blocks per
   inner iteration.
   NOTE(review): pU = pY + 4 and pV = pU + width describe an interleaved
   layout specific to this V4L1 palette, unlike the planar variant above -
   confirm against the VIDEO_PALETTE_YUV420 definition before changing. */
static void
yuv420_to_rgb24(int width, int height,
unsigned char *pIn0, unsigned char *pOut0)
{
const int bytes = 24 >> 3;
int i, j, y00, y01, y10, y11, u, v;
unsigned char *pY = pIn0;
unsigned char *pU = pY + 4;
unsigned char *pV = pU + width;
unsigned char *pOut = pOut0;
for (j = 0; j <= height - 2; j += 2) {
for (i = 0; i <= width - 4; i += 4) {
/* First 2x2 block. */
y00 = *pY;
y01 = *(pY + 1);
y10 = *(pY + width);
y11 = *(pY + width + 1);
u = (*pU++) - 128;
v = (*pV++) - 128;
move_420_block(y00, y01, y10, y11, u, v,
width, pOut);
pY += 2;
pOut += 2 * bytes;
/* Second 2x2 block. */
y00 = *pY;
y01 = *(pY + 1);
y10 = *(pY + width);
y11 = *(pY + width + 1);
u = (*pU++) - 128;
v = (*pV++) - 128;
move_420_block(y00, y01, y10, y11, u, v,
width, pOut);
pY += 4;
pOut += 2 * bytes;
}
/* Skip the second scan line of the block pair (already written). */
pY += width;
pOut += width * bytes;
}
}
#endif
/* Convert a planar YUV 4:1:1 image (full Y plane followed by quarter-size
   U then V planes) into packed BGR24.  Each horizontal run of four Y
   samples shares one U and one V sample (see move_411_block). */
static void
yuv411p_to_rgb24(int width, int height,
unsigned char *pIn0, unsigned char *pOut0)
{
    const int numpix = width * height;
    const int bytes = 24 >> 3;           /* 3 output bytes per pixel */
    int i, j, y00, y01, y10, y11, u, v;
    unsigned char *pY = pIn0;
    unsigned char *pU = pY + numpix;
    unsigned char *pV = pU + numpix / 4;
    unsigned char *pOut = pOut0;

    /* FIX: was `j <= height`, which processed height+1 rows and overran
       both the input planes and the output buffer by one full row. */
    for (j = 0; j < height; j++) {
        for (i = 0; i <= width - 4; i += 4) {
            y00 = *pY;
            y01 = *(pY + 1);
            y10 = *(pY + 2);
            y11 = *(pY + 3);
            u = (*pU++) - 128;
            v = (*pV++) - 128;

            move_411_block(y00, y01, y10, y11, u, v,
                           width, pOut);

            pY += 4;
            pOut += 4 * bytes;
        }
    }
}
#define SAT(c) \
if (c & (~255)) { if (c < 0) c = 0; else c = 255; }
#ifdef HAVE_CAMV4L2
/* Convert packed YUYV (Y0 Cb Y1 Cr per pixel pair) into packed BGR24.
   Integer approximation of the YUV->RGB transform; each Cb/Cr pair is
   shared by two output pixels.  Assumes an even width. */
static void
yuyv_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
    unsigned char *s = src;
    unsigned char *d = dst;
    int row, pair;

    for (row = 0; row < height; row++) {
        for (pair = 0; pair < (width >> 1); pair++) {
            int r, g, b;
            int y1 = *s++;
            int cb = ((*s - 128) * 454) >> 8;
            int cg = (*s++ - 128) * 88;
            int y2 = *s++;
            int cr = ((*s - 128) * 359) >> 8;
            cg = (cg + (*s++ - 128) * 183) >> 8;

            /* First pixel of the pair. */
            r = y1 + cr;
            b = y1 + cb;
            g = y1 - cg;
            SAT(r);
            SAT(g);
            SAT(b);
            *d++ = b;
            *d++ = g;
            *d++ = r;

            /* Second pixel shares the same chroma. */
            r = y2 + cr;
            b = y2 + cb;
            g = y2 - cg;
            SAT(r);
            SAT(g);
            SAT(b);
            *d++ = b;
            *d++ = g;
            *d++ = r;
        }
    }
}
/* Convert packed UYVY (Cb Y0 Cr Y1 per pixel pair) into packed BGR24.
   Identical arithmetic to yuyv_to_rgb24, only the byte order of the
   input stream differs.  Assumes an even width. */
static void
uyvy_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
    unsigned char *s = src;
    unsigned char *d = dst;
    int row, pair;

    for (row = 0; row < height; row++) {
        for (pair = 0; pair < (width >> 1); pair++) {
            int r, g, b;
            int cb = ((*s - 128) * 454) >> 8;
            int cg = (*s++ - 128) * 88;
            int y1 = *s++;
            int cr = ((*s - 128) * 359) >> 8;
            cg = (cg + (*s++ - 128) * 183) >> 8;
            int y2 = *s++;

            /* First pixel of the pair. */
            r = y1 + cr;
            b = y1 + cb;
            g = y1 - cg;
            SAT(r);
            SAT(g);
            SAT(b);
            *d++ = b;
            *d++ = g;
            *d++ = r;

            /* Second pixel shares the same chroma. */
            r = y2 + cr;
            b = y2 + cb;
            g = y2 - cg;
            SAT(r);
            SAT(g);
            SAT(b);
            *d++ = b;
            *d++ = g;
            *d++ = r;
        }
    }
}
#endif
#ifdef HAVE_JPEG
static bool
mjpeg_to_rgb24 (int width, int height,
unsigned char *src, int length,
unsigned char *dst)
{
cv::Mat temp=cv::imdecode(cv::Mat(std::vector<uchar>(src, src + length)), 1);
if( !temp.data || temp.cols != width || temp.rows != height )
return false;
memcpy(dst, temp.data, width*height*3);
return true;
}
#endif
#ifdef HAVE_CAMV4L2
/* Demosaic an 8-bit BGGR Bayer image (src) into a 24-bit image (dst),
   writing 3 bytes per pixel.  Interior pixels interpolate the two missing
   colour channels from their neighbours; border pixels fall back to the
   nearest available samples.
   NOTE(review): the interior tests use `i > WIDTH` / `i < WIDTH*(HEIGHT-1)`,
   so the first pixel of row 1 (and similar edge cases) take the border
   path - appears to be a conservative bounds check; confirm before
   tightening. */
static void bayer2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, unsigned char *dst)
{
long int i;
unsigned char *rawpt, *scanpt;
long int size;
rawpt = src;
scanpt = dst;
size = WIDTH*HEIGHT;
for ( i = 0; i < size; i++ ) {
/* Even rows of the Bayer mosaic. */
if ( (i/WIDTH) % 2 == 0 ) {
if ( (i % 2) == 0 ) {
/* Interior: diagonal and 4-neighbour averages for the missing channels. */
if ( (i > WIDTH) && ((i % WIDTH) > 0) ) {
*scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+
*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4;
*scanpt++ = (*(rawpt-1)+*(rawpt+1)+
*(rawpt+WIDTH)+*(rawpt-WIDTH))/4;
*scanpt++ = *rawpt;
} else {
/* Border: use the closest available neighbours. */
*scanpt++ = *(rawpt+WIDTH+1);
*scanpt++ = (*(rawpt+1)+*(rawpt+WIDTH))/2;
*scanpt++ = *rawpt;
}
} else {
if ( (i > WIDTH) && ((i % WIDTH) < (WIDTH-1)) ) {
*scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2;
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt-1)+*(rawpt+1))/2;
} else {
*scanpt++ = *(rawpt+WIDTH);
*scanpt++ = *rawpt;
*scanpt++ = *(rawpt-1);
}
}
} else {
/* Odd rows of the Bayer mosaic. */
if ( (i % 2) == 0 ) {
if ( (i < (WIDTH*(HEIGHT-1))) && ((i % WIDTH) > 0) ) {
*scanpt++ = (*(rawpt-1)+*(rawpt+1))/2;
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt+WIDTH)+*(rawpt-WIDTH))/2;
} else {
*scanpt++ = *(rawpt+1);
*scanpt++ = *rawpt;
*scanpt++ = *(rawpt-WIDTH);
}
} else {
if ( i < (WIDTH*(HEIGHT-1)) && ((i % WIDTH) < (WIDTH-1)) ) {
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt-1)+*(rawpt+1)+
*(rawpt-WIDTH)+*(rawpt+WIDTH))/4;
*scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+
*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4;
} else {
*scanpt++ = *rawpt;
*scanpt++ = (*(rawpt-1)+*(rawpt-WIDTH))/2;
*scanpt++ = *(rawpt-WIDTH-1);
}
}
}
rawpt++;
}
}
/* Demosaic an 8-bit GBRG Bayer image (src) into a 24-bit image (dst),
   writing 3 bytes per pixel.  Same neighbour-averaging scheme as
   bayer2rgb24, with offsets shifted for the GBRG phase; border pixels
   fall back to the nearest available samples. */
static void sgbrg2rgb24(long int WIDTH, long int HEIGHT, unsigned char *src, unsigned char *dst)
{
long int i;
unsigned char *rawpt, *scanpt;
long int size;
rawpt = src;
scanpt = dst;
size = WIDTH*HEIGHT;
for ( i = 0; i < size; i++ )
{
/* Even rows of the Bayer mosaic. */
if ( (i/WIDTH) % 2 == 0 )
{
if ( (i % 2) == 0 )
{
/* Interior pixel: average horizontal / vertical neighbours. */
if ( (i > WIDTH) && ((i % WIDTH) > 0) )
{
*scanpt++ = (*(rawpt-1)+*(rawpt+1))/2;
*scanpt++ = *(rawpt);
*scanpt++ = (*(rawpt-WIDTH) + *(rawpt+WIDTH))/2;
} else
{
/* Border: first row or first column. */
*scanpt++ = *(rawpt+1);
*scanpt++ = *(rawpt);
*scanpt++ = *(rawpt+WIDTH);
}
} else
{
if ( (i > WIDTH) && ((i % WIDTH) < (WIDTH-1)) )
{
*scanpt++ = *(rawpt);
*scanpt++ = (*(rawpt-1)+*(rawpt+1)+*(rawpt-WIDTH)+*(rawpt+WIDTH))/4;
*scanpt++ = (*(rawpt-WIDTH-1) + *(rawpt-WIDTH+1) + *(rawpt+WIDTH-1) + *(rawpt+WIDTH+1))/4;
} else
{
*scanpt++ = *(rawpt);
*scanpt++ = (*(rawpt-1)+*(rawpt+WIDTH))/2;
*scanpt++ = *(rawpt+WIDTH-1);
}
}
} else
{
/* Odd rows of the Bayer mosaic. */
if ( (i % 2) == 0 )
{
if ( (i < (WIDTH*(HEIGHT-1))) && ((i % WIDTH) > 0) )
{
*scanpt++ = (*(rawpt-WIDTH-1)+*(rawpt-WIDTH+1)+*(rawpt+WIDTH-1)+*(rawpt+WIDTH+1))/4;
*scanpt++ = (*(rawpt-1)+*(rawpt+1)+*(rawpt-WIDTH)+*(rawpt+WIDTH))/4;
*scanpt++ = *(rawpt);
} else
{
*scanpt++ = *(rawpt-WIDTH+1);
*scanpt++ = (*(rawpt+1)+*(rawpt-WIDTH))/2;
*scanpt++ = *(rawpt);
}
} else
{
if ( i < (WIDTH*(HEIGHT-1)) && ((i % WIDTH) < (WIDTH-1)) )
{
*scanpt++ = (*(rawpt-WIDTH)+*(rawpt+WIDTH))/2;
*scanpt++ = *(rawpt);
*scanpt++ = (*(rawpt-1)+*(rawpt+1))/2;
} else
{
/* Border: last row or last column. */
*scanpt++ = (*(rawpt-WIDTH));
*scanpt++ = *(rawpt);
*scanpt++ = (*(rawpt-1));
}
}
}
rawpt++;
}
}
/* Copy a packed 24-bit image while swapping the first and third channel
   of every pixel (RGB -> BGR, as OpenCV expects). */
static void
rgb24_to_rgb24 (int width, int height, unsigned char *src, unsigned char *dst)
{
    unsigned char *s = src;
    unsigned char *d = dst;
    unsigned char * const end = src + width * height * 3;

    while (s < end) {
        d[0] = s[2];
        d[1] = s[1];
        d[2] = s[0];
        s += 3;
        d += 3;
    }
}
#define CLAMP(x) ((x)<0?0:((x)>255)?255:(x))
/* One entry of the SN9C10x variable-length code table: `len` bits of
   input map to either a delta (`val`, is_abs == 0) or an absolute
   value (is_abs == 1). */
typedef struct {
int is_abs;
int len;
int val;
} code_table_t;

/* Decode table indexed by the next 8 bits of the input stream; filled by
   sonix_decompress_init() and consumed by sonix_decompress(). */
static code_table_t table[256];
static int init_done = 0;

/* Populate `table` with the SN9C10x code book.  Each 8-bit prefix is
   classified by its leading bits; unmatched prefixes keep the default
   {0, 0, 0} entry.  Sets init_done when finished. */
static void sonix_decompress_init(void)
{
    int i;

    for (i = 0; i < 256; i++) {
        int is_abs = 0;
        int val = 0;
        int len = 0;

        if ((i & 0x80) == 0) {
            /* 0xxxxxxx : delta 0, 1 bit */
            len = 1;
        } else if ((i & 0xE0) == 0x80) {
            /* 100xxxxx : delta +4, 3 bits */
            val = +4;
            len = 3;
        } else if ((i & 0xE0) == 0xA0) {
            /* 101xxxxx : delta -4, 3 bits */
            val = -4;
            len = 3;
        } else if ((i & 0xF0) == 0xD0) {
            /* 1101xxxx : delta +11, 4 bits */
            val = +11;
            len = 4;
        } else if ((i & 0xF0) == 0xF0) {
            /* 1111xxxx : delta -11, 4 bits */
            val = -11;
            len = 4;
        } else if ((i & 0xF8) == 0xC8) {
            /* 11001xxx : delta +20, 5 bits */
            val = +20;
            len = 5;
        } else if ((i & 0xFC) == 0xC0) {
            /* 110000xx : delta -20, 6 bits */
            val = -20;
            len = 6;
        } else if ((i & 0xFC) == 0xC4) {
            /* 110001xx : delta 0, 8 bits */
            len = 8;
        } else if ((i & 0xF0) == 0xE0) {
            /* 1110xxxx : absolute value xxxx0000, 8 bits */
            is_abs = 1;
            val = (i & 0x0F) << 4;
            len = 8;
        }

        table[i].is_abs = is_abs;
        table[i].val = val;
        table[i].len = len;
    }

    init_done = 1;
}
/* Decompress an SN9C10x-compressed frame (width x height, one byte per
   pixel on output) from `inp` into `outp`.  Deltas are predicted from the
   pixel two columns to the left and/or two rows up (same Bayer colour).
   Requires sonix_decompress_init() to have filled `table`.
   Returns 0 on success, -1 when the table was never initialized.
   NOTE(review): the bit reader always fetches addr[1], so it may read one
   byte past the last code of the stream - confirm input buffers carry at
   least one byte of slack. */
static int sonix_decompress(int width, int height, unsigned char *inp, unsigned char *outp)
{
int row, col;
int val;
int bitpos;
unsigned char code;
unsigned char *addr;
if (!init_done) {
/* sonix_decompress_init() must run first. */
return -1;
}
bitpos = 0;
for (row = 0; row < height; row++) {
col = 0;
/* The first two pixels of the first two rows are stored raw (8 bits). */
if (row < 2) {
addr = inp + (bitpos >> 3);
code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
bitpos += 8;
*outp++ = code;
addr = inp + (bitpos >> 3);
code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
bitpos += 8;
*outp++ = code;
col += 2;
}
while (col < width) {
/* Peek 8 bits and look the prefix up in the code table. */
addr = inp + (bitpos >> 3);
code = (addr[0] << (bitpos & 7)) | (addr[1] >> (8 - (bitpos & 7)));
bitpos += table[code].len;
val = table[code].val;
if (!table[code].is_abs) {
/* Delta code: add the prediction from already-decoded pixels. */
if (col < 2) {
/* Left column: predict from the pixel two rows up. */
val += outp[-2*width];
}
else if (row < 2) {
/* Top rows: predict from the pixel two columns left. */
val += outp[-2];
}
else {
/* Interior: average of both predictors. */
val += (outp[-2] + outp[-2*width]) / 2;
}
}
*outp++ = CLAMP(val);
col++;
}
}
return 0;
}
#endif
/* Convert the most recently grabbed buffer into capture->frame (packed
 * BGR24, IPL_ORIGIN_TL) and return it.  Returns 0 on conversion failure
 * (bad MJPEG frame or unconvertible V4L1 palette). */
static IplImage* icvRetrieveFrameCAM_V4L( CvCaptureCAM_V4L* capture, int) {
#ifdef HAVE_CAMV4L2
  if (V4L2_SUPPORT == 0)
#endif
#ifdef HAVE_CAMV4L
  {
    /* V4L1: block until the driver has finished filling the mmap'ed frame */
    if (ioctl(capture->deviceHandle, VIDIOCSYNC, &capture->mmaps[capture->bufferIndex].frame) == -1) {
      fprintf( stderr, "VIDEOIO ERROR: V4L: Could not SYNC to video stream. %s\n", strerror(errno));
    }
  }
#endif

  /* (Re)allocate the output image if the negotiated capture size changed */
#ifdef HAVE_CAMV4L2
  if (V4L2_SUPPORT == 1)
  {
    if(((unsigned long)capture->frame.width != capture->form.fmt.pix.width)
       || ((unsigned long)capture->frame.height != capture->form.fmt.pix.height)) {
        cvFree(&capture->frame.imageData);
        cvInitImageHeader( &capture->frame,
                           cvSize( capture->form.fmt.pix.width,
                                   capture->form.fmt.pix.height ),
                           IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 );
        capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
    }
  }
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
  else
#endif
#ifdef HAVE_CAMV4L
  {
    if((capture->frame.width != capture->mmaps[capture->bufferIndex].width)
       || (capture->frame.height != capture->mmaps[capture->bufferIndex].height)) {
        cvFree(&capture->frame.imageData);
        cvInitImageHeader( &capture->frame,
                           cvSize( capture->captureWindow.width,
                                   capture->captureWindow.height ),
                           IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 );
        capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
    }
  }
#endif

#ifdef HAVE_CAMV4L2
  if (V4L2_SUPPORT == 1)
  {
    /* Convert the dequeued buffer from the negotiated pixel format to BGR24 */
    switch (capture->palette)
    {
    case PALETTE_BGR24:
      /* already BGR24: straight copy */
      memcpy((char *)capture->frame.imageData,
             (char *)capture->buffers[capture->bufferIndex].start,
             capture->frame.imageSize);
      break;

    case PALETTE_YVU420:
      yuv420p_to_rgb24(capture->form.fmt.pix.width,
                       capture->form.fmt.pix.height,
                       (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                       (unsigned char*)capture->frame.imageData);
      break;

    case PALETTE_YUV411P:
      yuv411p_to_rgb24(capture->form.fmt.pix.width,
                       capture->form.fmt.pix.height,
                       (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                       (unsigned char*)capture->frame.imageData);
      break;

#ifdef HAVE_JPEG
    case PALETTE_MJPEG:
      if (!mjpeg_to_rgb24(capture->form.fmt.pix.width,
                          capture->form.fmt.pix.height,
                          (unsigned char*)(capture->buffers[capture->bufferIndex]
                                           .start),
                          capture->buffers[capture->bufferIndex].length,
                          (unsigned char*)capture->frame.imageData))
          return 0;   /* corrupt JPEG frame */
      break;
#endif

    case PALETTE_YUYV:
      yuyv_to_rgb24(capture->form.fmt.pix.width,
                    capture->form.fmt.pix.height,
                    (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                    (unsigned char*)capture->frame.imageData);
      break;

    case PALETTE_UYVY:
      uyvy_to_rgb24(capture->form.fmt.pix.width,
                    capture->form.fmt.pix.height,
                    (unsigned char*)(capture->buffers[capture->bufferIndex].start),
                    (unsigned char*)capture->frame.imageData);
      break;

    case PALETTE_SBGGR8:
      bayer2rgb24(capture->form.fmt.pix.width,
                  capture->form.fmt.pix.height,
                  (unsigned char*)capture->buffers[capture->bufferIndex].start,
                  (unsigned char*)capture->frame.imageData);
      break;

    case PALETTE_SN9C10X:
      /* two-step: decompress into a scratch slot, then demosaic from it.
       * NOTE(review): the scratch is the *next* queued mmap buffer
       * ((bufferIndex+1) % req.count); verify this cannot race with the
       * driver refilling that buffer -- the heap slot at
       * buffers[MAX_V4L_BUFFERS] (freed in icvCloseCAM_V4L) looks like
       * the intended scratch. */
      sonix_decompress_init();
      sonix_decompress(capture->form.fmt.pix.width,
                       capture->form.fmt.pix.height,
                       (unsigned char*)capture->buffers[capture->bufferIndex].start,
                       (unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start);
      bayer2rgb24(capture->form.fmt.pix.width,
                  capture->form.fmt.pix.height,
                  (unsigned char*)capture->buffers[(capture->bufferIndex+1) % capture->req.count].start,
                  (unsigned char*)capture->frame.imageData);
      break;

    case PALETTE_SGBRG:
      /* BUGFIX: demosaic the buffer that was just dequeued
       * (bufferIndex), not the unrelated (bufferIndex+1) slot that the
       * old code read (stale / driver-owned data). */
      sgbrg2rgb24(capture->form.fmt.pix.width,
                  capture->form.fmt.pix.height,
                  (unsigned char*)capture->buffers[capture->bufferIndex].start,
                  (unsigned char*)capture->frame.imageData);
      break;

    case PALETTE_RGB24:
      /* BUGFIX: same wrong-buffer selection as PALETTE_SGBRG above */
      rgb24_to_rgb24(capture->form.fmt.pix.width,
                     capture->form.fmt.pix.height,
                     (unsigned char*)capture->buffers[capture->bufferIndex].start,
                     (unsigned char*)capture->frame.imageData);
      break;
    }
  }
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
  else
#endif
#ifdef HAVE_CAMV4L
  {
    /* V4L1: convert the synced mmap'ed frame to BGR24 */
    switch(capture->imageProperties.palette)
    {
    case VIDEO_PALETTE_RGB24:
      memcpy((char *)capture->frame.imageData,
             (char *)(capture->memoryMap + capture->memoryBuffer.offsets[capture->bufferIndex]),
             capture->frame.imageSize);
      break;
    case VIDEO_PALETTE_YUV420P:
      yuv420p_to_rgb24(capture->captureWindow.width,
                       capture->captureWindow.height,
                       (unsigned char*)(capture->memoryMap + capture->memoryBuffer.offsets[capture->bufferIndex]),
                       (unsigned char*)capture->frame.imageData);
      break;
    case VIDEO_PALETTE_YUV420:
      yuv420_to_rgb24(capture->captureWindow.width,
                      capture->captureWindow.height,
                      (unsigned char*)(capture->memoryMap + capture->memoryBuffer.offsets[capture->bufferIndex]),
                      (unsigned char*)capture->frame.imageData);
      break;
    case VIDEO_PALETTE_YUV411P:
      yuv411p_to_rgb24(capture->captureWindow.width,
                       capture->captureWindow.height,
                       (unsigned char*)(capture->memoryMap + capture->memoryBuffer.offsets[capture->bufferIndex]),
                       (unsigned char*)capture->frame.imageData);
      break;
    default:
      fprintf( stderr,
               "VIDEOIO ERROR: V4L: Cannot convert from palette %d to RGB\n",
               capture->imageProperties.palette);
      return 0;
    }
  }
#endif

  return(&capture->frame);
}
/* Query a capture property (CV_CAP_PROP_*).
 * FRAME_WIDTH / FRAME_HEIGHT are read back from the driver; image
 * controls (brightness, contrast, saturation, hue, gain, exposure) are
 * read via VIDIOC_G_CTRL and normalised using the ranges probed at open
 * time (V4L2) or V4L1's fixed 16-bit range.  Returns -1 on failure or
 * unsupported property.
 * (Also fixes a stray ';' that followed the original function body.) */
static double icvGetPropertyCAM_V4L (CvCaptureCAM_V4L* capture,
                                     int property_id ) {
#ifdef HAVE_CAMV4L2
#ifdef HAVE_CAMV4L
  if (V4L2_SUPPORT == 1)
#endif
  {
      int v4l2_min = 0;
      int v4l2_max = 255;

      /* refresh the negotiated format so width/height are current */
      CLEAR (capture->form);
      capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form)) {
          perror ("VIDIOC_G_FMT");
          return -1;
      }

      switch (property_id) {
      case CV_CAP_PROP_FRAME_WIDTH:
          return capture->form.fmt.pix.width;
      case CV_CAP_PROP_FRAME_HEIGHT:
          return capture->form.fmt.pix.height;
      }

      /* map the OpenCV property id onto a V4L2 control id */
      switch (property_id) {
      case CV_CAP_PROP_POS_MSEC:
          /* timestamp of the last dequeued frame, in milliseconds */
          if (capture->FirstCapture) {
              return 0;
          } else {
              return 1000 * capture->timestamp.tv_sec + ((double) capture->timestamp.tv_usec) / 1000;
          }
          break;
      case CV_CAP_PROP_BRIGHTNESS:
          capture->control.id = V4L2_CID_BRIGHTNESS;
          break;
      case CV_CAP_PROP_CONTRAST:
          capture->control.id = V4L2_CID_CONTRAST;
          break;
      case CV_CAP_PROP_SATURATION:
          capture->control.id = V4L2_CID_SATURATION;
          break;
      case CV_CAP_PROP_HUE:
          capture->control.id = V4L2_CID_HUE;
          break;
      case CV_CAP_PROP_GAIN:
          capture->control.id = V4L2_CID_GAIN;
          break;
      case CV_CAP_PROP_EXPOSURE:
          capture->control.id = V4L2_CID_EXPOSURE;
          break;
      default:
          fprintf(stderr,
                  "VIDEOIO ERROR: V4L2: getting property #%d is not supported\n",
                  property_id);
          return -1;
      }

      if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_CTRL,
                       &capture->control)) {
          /* the device does not implement this control */
          fprintf( stderr, "VIDEOIO ERROR: V4L2: ");
          switch (property_id) {
          case CV_CAP_PROP_BRIGHTNESS:
              fprintf (stderr, "Brightness");
              break;
          case CV_CAP_PROP_CONTRAST:
              fprintf (stderr, "Contrast");
              break;
          case CV_CAP_PROP_SATURATION:
              fprintf (stderr, "Saturation");
              break;
          case CV_CAP_PROP_HUE:
              fprintf (stderr, "Hue");
              break;
          case CV_CAP_PROP_GAIN:
              fprintf (stderr, "Gain");
              break;
          case CV_CAP_PROP_EXPOSURE:
              fprintf (stderr, "Exposure");
              break;
          }
          fprintf (stderr, " is not supported by your device\n");
          return -1;
      }

      /* pick the control range probed when the device was opened */
      switch (property_id) {
      case CV_CAP_PROP_BRIGHTNESS:
          v4l2_min = capture->v4l2_brightness_min;
          v4l2_max = capture->v4l2_brightness_max;
          break;
      case CV_CAP_PROP_CONTRAST:
          v4l2_min = capture->v4l2_contrast_min;
          v4l2_max = capture->v4l2_contrast_max;
          break;
      case CV_CAP_PROP_SATURATION:
          v4l2_min = capture->v4l2_saturation_min;
          v4l2_max = capture->v4l2_saturation_max;
          break;
      case CV_CAP_PROP_HUE:
          v4l2_min = capture->v4l2_hue_min;
          v4l2_max = capture->v4l2_hue_max;
          break;
      case CV_CAP_PROP_GAIN:
          v4l2_min = capture->v4l2_gain_min;
          v4l2_max = capture->v4l2_gain_max;
          break;
      case CV_CAP_PROP_EXPOSURE:
          v4l2_min = capture->v4l2_exposure_min;
          v4l2_max = capture->v4l2_exposure_max;
          break;
      }

      /* NOTE(review): the "+ 1" slightly skews the [0,1] normalisation;
       * kept unchanged for backward compatibility with existing callers. */
      return ((float)capture->control.value - v4l2_min + 1) / (v4l2_max - v4l2_min);
  }
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
  else
#endif
#ifdef HAVE_CAMV4L
  {
      int retval = -1;

      /* V4L1: refresh the capture window so size queries are current */
      if (ioctl (capture->deviceHandle,
                 VIDIOCGWIN, &capture->captureWindow) < 0) {
          fprintf (stderr,
                   "VIDEOIO ERROR: V4L: "
                   "Unable to determine size of incoming image\n");
          icvCloseCAM_V4L(capture);
          return -1;
      }

      switch (property_id) {
      case CV_CAP_PROP_FRAME_WIDTH:
          retval = capture->captureWindow.width;
          break;
      case CV_CAP_PROP_FRAME_HEIGHT:
          retval = capture->captureWindow.height;
          break;
      case CV_CAP_PROP_BRIGHTNESS:
          retval = capture->imageProperties.brightness;
          break;
      case CV_CAP_PROP_CONTRAST:
          retval = capture->imageProperties.contrast;
          break;
      case CV_CAP_PROP_SATURATION:
          retval = capture->imageProperties.colour;
          break;
      case CV_CAP_PROP_HUE:
          retval = capture->imageProperties.hue;
          break;
      case CV_CAP_PROP_GAIN:
          fprintf(stderr,
                  "VIDEOIO ERROR: V4L: Gain control in V4L is not supported\n");
          return -1;
          break;
      case CV_CAP_PROP_EXPOSURE:
          fprintf(stderr,
                  "VIDEOIO ERROR: V4L: Exposure control in V4L is not supported\n");
          return -1;
          break;
      default:
          fprintf(stderr,
                  "VIDEOIO ERROR: V4L: getting property #%d is not supported\n",
                  property_id);
      }

      if (retval == -1) {
          return -1;
      }
      /* V4L1 controls are 16-bit, hence the 0xFFFF normalisation.
       * NOTE(review): FRAME_WIDTH/HEIGHT fall through this division too;
       * kept as-is (historical behaviour) -- confirm before changing. */
      return float (retval) / 0xFFFF;
  }
#endif
}
/* Negotiate a new capture frame size of w x h with the driver.
 * V4L2: resets cropping to the default rectangle, sets the pixel format
 * size, requests 30 fps, and reads back what the driver selected.
 * V4L1: clamps to the hardware limits and uses VIDIOCSWIN/VIDIOCGWIN.
 * Always returns 0 (historical interface; failures close the capture). */
static int icvSetVideoSize( CvCaptureCAM_V4L* capture, int w, int h) {
#ifdef HAVE_CAMV4L2
  if (V4L2_SUPPORT == 1)
  {
    /* reset cropping to the driver's default rectangle first */
    CLEAR (capture->cropcap);
    capture->cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl (capture->deviceHandle, VIDIOC_CROPCAP, &capture->cropcap) < 0) {
        fprintf(stderr, "VIDEOIO ERROR: V4L/V4L2: VIDIOC_CROPCAP\n");
    } else {
        CLEAR (capture->crop);
        capture->crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        capture->crop.c= capture->cropcap.defrect;
        /* non-fatal: many drivers do not implement S_CROP */
        if (ioctl (capture->deviceHandle, VIDIOC_S_CROP, &capture->crop) < 0) {
            fprintf(stderr, "VIDEOIO ERROR: V4L/V4L2: VIDIOC_S_CROP\n");
        }
    }

    /* fetch the current format, patch in the requested size, set it back.
     * BUGFIX: the old code also wrote capture->form.fmt.win.* here, but
     * v4l2_format.fmt is a union -- writing the `win` member clobbered
     * bytes of the `pix` fields set just above.  Only `pix` is valid for
     * V4L2_BUF_TYPE_VIDEO_CAPTURE, so the `win` writes were removed. */
    CLEAR (capture->form);
    capture->form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form);
    capture->form.fmt.pix.width = w;
    capture->form.fmt.pix.height = h;
    capture->form.fmt.pix.field = V4L2_FIELD_ANY;
    ioctl (capture->deviceHandle, VIDIOC_S_FMT, &capture->form);

    /* request 30 fps (best effort; the driver may choose otherwise) */
    struct v4l2_streamparm setfps;
    memset (&setfps, 0, sizeof(struct v4l2_streamparm));
    setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    setfps.parm.capture.timeperframe.numerator = 1;
    setfps.parm.capture.timeperframe.denominator = 30;
    ioctl (capture->deviceHandle, VIDIOC_S_PARM, &setfps);

    /* force buffer renegotiation on the next grab */
    capture->FirstCapture = 1;

    /* read back the size the driver actually selected */
    if (-1 == ioctl (capture->deviceHandle, VIDIOC_G_FMT, &capture->form))
    {
        fprintf(stderr, "VIDEOIO ERROR: V4L/V4L2: Could not obtain specifics of capture window.\n\n");
        icvCloseCAM_V4L(capture);
        return 0;
    }

    return 0;
  }
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
  else
#endif
#ifdef HAVE_CAMV4L
  {
    if (capture==0) return 0;
    /* clamp the request to the limits reported by the hardware */
    if (w>capture->capability.maxwidth) {
        w=capture->capability.maxwidth;
    }
    if (h>capture->capability.maxheight) {
        h=capture->capability.maxheight;
    }

    capture->captureWindow.width=w;
    capture->captureWindow.height=h;

    if (ioctl(capture->deviceHandle, VIDIOCSWIN, &capture->captureWindow) < 0) {
        icvCloseCAM_V4L(capture);
        return 0;
    }
    if (ioctl(capture->deviceHandle, VIDIOCGWIN, &capture->captureWindow) < 0) {
        icvCloseCAM_V4L(capture);
        return 0;
    }

    capture->FirstCapture = 1;
  }
#endif

  return 0;
}
/* Set one of the image controls (brightness / contrast / saturation /
 * hue / gain / exposure).  `value` is expected in [0,1]; it is clamped,
 * then scaled to the device's native control range (probed at open time
 * for V4L2, fixed 16-bit for V4L1).
 * Returns 0 on success, -1 on failure or unsupported property. */
static int icvSetControl (CvCaptureCAM_V4L* capture,
int property_id, double value) {
/* clamp the caller's value into [0,1] */
if (value < 0.0) {
value = 0.0;
} else if (value > 1.0) {
value = 1.0;
}
#ifdef HAVE_CAMV4L2
if (V4L2_SUPPORT == 1)
{
/* fallback range if none was probed for this control */
int v4l2_min = 0;
int v4l2_max = 255;
/* first pass: map the OpenCV property onto a V4L2 control id */
CLEAR (capture->control);
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
capture->control.id = V4L2_CID_BRIGHTNESS;
break;
case CV_CAP_PROP_CONTRAST:
capture->control.id = V4L2_CID_CONTRAST;
break;
case CV_CAP_PROP_SATURATION:
capture->control.id = V4L2_CID_SATURATION;
break;
case CV_CAP_PROP_HUE:
capture->control.id = V4L2_CID_HUE;
break;
case CV_CAP_PROP_GAIN:
capture->control.id = V4L2_CID_GAIN;
break;
case CV_CAP_PROP_EXPOSURE:
capture->control.id = V4L2_CID_EXPOSURE;
break;
default:
fprintf(stderr,
"VIDEOIO ERROR: V4L2: setting property #%d is not supported\n",
property_id);
return -1;
}
/* probe the control: G_CTRL failing means the device lacks it */
if (-1 == ioctl (capture->deviceHandle,
VIDIOC_G_CTRL, &capture->control)) {
return -1;
}
/* pick the control range discovered when the device was opened */
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
v4l2_min = capture->v4l2_brightness_min;
v4l2_max = capture->v4l2_brightness_max;
break;
case CV_CAP_PROP_CONTRAST:
v4l2_min = capture->v4l2_contrast_min;
v4l2_max = capture->v4l2_contrast_max;
break;
case CV_CAP_PROP_SATURATION:
v4l2_min = capture->v4l2_saturation_min;
v4l2_max = capture->v4l2_saturation_max;
break;
case CV_CAP_PROP_HUE:
v4l2_min = capture->v4l2_hue_min;
v4l2_max = capture->v4l2_hue_max;
break;
case CV_CAP_PROP_GAIN:
v4l2_min = capture->v4l2_gain_min;
v4l2_max = capture->v4l2_gain_max;
break;
case CV_CAP_PROP_EXPOSURE:
v4l2_min = capture->v4l2_exposure_min;
v4l2_max = capture->v4l2_exposure_max;
break;
}
/* second pass: rebuild the request from scratch (G_CTRL may have
 * filled other fields) and set the scaled value */
CLEAR (capture->control);
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
capture->control.id = V4L2_CID_BRIGHTNESS;
break;
case CV_CAP_PROP_CONTRAST:
capture->control.id = V4L2_CID_CONTRAST;
break;
case CV_CAP_PROP_SATURATION:
capture->control.id = V4L2_CID_SATURATION;
break;
case CV_CAP_PROP_HUE:
capture->control.id = V4L2_CID_HUE;
break;
case CV_CAP_PROP_GAIN:
capture->control.id = V4L2_CID_GAIN;
break;
case CV_CAP_PROP_EXPOSURE:
capture->control.id = V4L2_CID_EXPOSURE;
break;
default:
fprintf(stderr,
"VIDEOIO ERROR: V4L2: setting property #%d is not supported\n",
property_id);
return -1;
}
/* scale [0,1] into the device's native [min,max] range */
capture->control.value = (int)(value * (v4l2_max - v4l2_min) + v4l2_min);
/* ERANGE is tolerated: the driver clamped the value itself */
if (-1 == ioctl (capture->deviceHandle,
VIDIOC_S_CTRL, &capture->control) && errno != ERANGE) {
perror ("VIDIOC_S_CTRL");
return -1;
}
}
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
else
#endif
#ifdef HAVE_CAMV4L
{
int v4l_value;
/* V4L1 controls use a fixed 16-bit range */
v4l_value = (int)(0xFFFF * value);
switch (property_id) {
case CV_CAP_PROP_BRIGHTNESS:
capture->imageProperties.brightness = v4l_value;
break;
case CV_CAP_PROP_CONTRAST:
capture->imageProperties.contrast = v4l_value;
break;
case CV_CAP_PROP_SATURATION:
capture->imageProperties.colour = v4l_value;
break;
case CV_CAP_PROP_HUE:
capture->imageProperties.hue = v4l_value;
break;
case CV_CAP_PROP_GAIN:
fprintf(stderr,
"VIDEOIO ERROR: V4L: Gain control in V4L is not supported\n");
return -1;
case CV_CAP_PROP_EXPOSURE:
fprintf(stderr,
"VIDEOIO ERROR: V4L: Exposure control in V4L is not supported\n");
return -1;
default:
fprintf(stderr,
"VIDEOIO ERROR: V4L: property #%d is not supported\n",
property_id);
return -1;
}
/* push the whole picture-properties struct back to the driver */
if (ioctl(capture->deviceHandle, VIDIOCSPICT, &capture->imageProperties)
< 0)
{
fprintf(stderr,
"VIDEOIO ERROR: V4L: Unable to set video informations\n");
icvCloseCAM_V4L(capture);
return -1;
}
}
#endif
/* all went well */
return 0;
}
/* Dispatch a CV_CAP_PROP_* set request.
 * Frame size is latched: the new size is only applied once BOTH
 * CV_CAP_PROP_FRAME_WIDTH and CV_CAP_PROP_FRAME_HEIGHT have been set,
 * after which the latch is cleared.
 * NOTE(review): `width`/`height` are function-local statics, so the
 * latched state is shared by ALL captures in the process (not
 * reentrant, not thread-safe) -- confirm single-capture usage before
 * relying on interleaved size changes on multiple devices.
 * Returns the underlying setter's result (0 on success, -1 on failure;
 * 0 for a latched width/height that was not applied yet). */
static int icvSetPropertyCAM_V4L( CvCaptureCAM_V4L* capture,
int property_id, double value ){
static int width = 0, height = 0;
int retval;
retval = 0;
switch (property_id) {
case CV_CAP_PROP_FRAME_WIDTH:
width = cvRound(value);
/* apply only when both dimensions have been supplied */
if(width !=0 && height != 0) {
retval = icvSetVideoSize( capture, width, height);
width = height = 0;
}
break;
case CV_CAP_PROP_FRAME_HEIGHT:
height = cvRound(value);
/* apply only when both dimensions have been supplied */
if(width !=0 && height != 0) {
retval = icvSetVideoSize( capture, width, height);
width = height = 0;
}
break;
case CV_CAP_PROP_BRIGHTNESS:
case CV_CAP_PROP_CONTRAST:
case CV_CAP_PROP_SATURATION:
case CV_CAP_PROP_HUE:
case CV_CAP_PROP_GAIN:
case CV_CAP_PROP_EXPOSURE:
/* image controls share a single scaled-value code path */
retval = icvSetControl(capture, property_id, value);
break;
default:
fprintf(stderr,
"VIDEOIO ERROR: V4L: setting property #%d is not supported\n",
property_id);
}
return retval;
}
/* Release all driver resources held by `capture`: stop streaming and
 * unmap the V4L2 buffers (plus the heap-allocated spare slot), or unmap
 * the V4L1 memory map; then close the device handle and free the
 * conversion image.  Safe to call with capture == NULL.
 * BUGFIX: the cleanup branches are now selected by `V4L2_SUPPORT == 1`.
 * The old `if (V4L2_SUPPORT == 0)` + #ifdef chain attached the V4L2
 * cleanup block to the wrong condition when only HAVE_CAMV4L2 was
 * defined, so the stream was never stopped/unmapped on V4L2-only
 * builds.  (Also removes a stray ';' after the function body.) */
static void icvCloseCAM_V4L( CvCaptureCAM_V4L* capture ){
   if (capture)
   {
#ifdef HAVE_CAMV4L2
     if (V4L2_SUPPORT == 1)
     {
       /* stop the stream before unmapping the buffers */
       capture->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
       if (-1 == ioctl(capture->deviceHandle, VIDIOC_STREAMOFF, &capture->type)) {
           perror ("Unable to stop the stream.");
       }

       for (unsigned int n_buffers_ = 0; n_buffers_ < capture->req.count; ++n_buffers_)
       {
           if (-1 == munmap (capture->buffers[n_buffers_].start, capture->buffers[n_buffers_].length)) {
               perror ("munmap");
           }
       }

       /* the spare conversion buffer is heap-allocated, not mmap'ed */
       if (capture->buffers[MAX_V4L_BUFFERS].start)
       {
           free(capture->buffers[MAX_V4L_BUFFERS].start);
           capture->buffers[MAX_V4L_BUFFERS].start = 0;
       }
     }
#endif
#if defined(HAVE_CAMV4L) && defined(HAVE_CAMV4L2)
     else
#endif
#ifdef HAVE_CAMV4L
     {
       if (capture->mmaps)
           free(capture->mmaps);
       if (capture->memoryMap)
           munmap(capture->memoryMap, capture->memoryBuffer.size);
     }
#endif

     if (capture->deviceHandle != -1)
         close(capture->deviceHandle);

     if (capture->frame.imageData) cvFree(&capture->frame.imageData);
   }
}
/* Thin C++ wrapper implementing the CvCapture interface on top of the
 * C-style CvCaptureCAM_V4L functions above.
 * BUGFIX: inherit publicly -- `class X : CvCapture` defaults to private
 * inheritance, which makes the CvCapture base inaccessible to callers
 * that convert/dispatch through a CvCapture* (the rest of the backend
 * framework does exactly that). */
class CvCaptureCAM_V4L_CPP : public CvCapture
{
public:
    CvCaptureCAM_V4L_CPP() { captureV4L = 0; }
    virtual ~CvCaptureCAM_V4L_CPP() { close(); }

    virtual bool open( int index );
    virtual void close();

    virtual double getProperty(int) const;
    virtual bool setProperty(int, double);
    virtual bool grabFrame();
    virtual IplImage* retrieveFrame(int);

protected:
    CvCaptureCAM_V4L* captureV4L;  /* owned; released in close() */
};
/* Open camera `index`; any previously opened device is released first.
 * Returns true when the device was opened successfully. */
bool CvCaptureCAM_V4L_CPP::open( int index )
{
    close();   /* drop whatever we may already hold */
    return (captureV4L = icvCaptureFromCAM_V4L(index)) != 0;
}
/* Release the underlying capture, if any; safe to call repeatedly. */
void CvCaptureCAM_V4L_CPP::close()
{
    if( !captureV4L )
        return;
    icvCloseCAM_V4L( captureV4L );
    cvFree( &captureV4L );   /* also resets captureV4L to 0 */
}
/* Grab (but do not convert) the next frame; false when no device is open. */
bool CvCaptureCAM_V4L_CPP::grabFrame()
{
    if( !captureV4L )
        return false;
    return icvGrabFrameCAM_V4L( captureV4L ) != 0;
}
/* Convert and return the last grabbed frame; 0 when no device is open. */
IplImage* CvCaptureCAM_V4L_CPP::retrieveFrame(int)
{
    if( !captureV4L )
        return 0;
    return icvRetrieveFrameCAM_V4L( captureV4L, 0 );
}
/* Query a capture property; 0.0 when no device is open. */
double CvCaptureCAM_V4L_CPP::getProperty( int propId ) const
{
    if( !captureV4L )
        return 0.0;
    return icvGetPropertyCAM_V4L( captureV4L, propId );
}
/* Set a capture property.
 * NOTE(review): icvSetPropertyCAM_V4L returns 0 on success and -1 on
 * failure, so `!= 0` appears to yield true on *failure* -- the boolean
 * looks inverted.  Left unchanged because icvSetVideoSize returns 0 on
 * all paths (success indistinguishable from failure there); confirm the
 * intended CvCapture::setProperty convention before changing. */
bool CvCaptureCAM_V4L_CPP::setProperty( int propId, double value )
{
return captureV4L ? icvSetPropertyCAM_V4L( captureV4L, propId, value ) != 0 : false;
}
/* Factory entry point: create a V4L/V4L2 capture for camera `index`.
 * Returns 0 (and frees the wrapper) when the device cannot be opened. */
CvCapture* cvCreateCameraCapture_V4L( int index )
{
    CvCaptureCAM_V4L_CPP* capture = new CvCaptureCAM_V4L_CPP;

    if( !capture->open( index ))
    {
        delete capture;
        return 0;
    }
    return (CvCapture*)capture;
}
#endif