This source file includes the following definitions:
- DebugPrintOut
- MyFreeMediaType
- MyDeleteMediaType
- setupBuffer
- STDMETHODIMP_
- STDMETHODIMP_
- QueryInterface
- SampleCB
- BufferCB
- setSize
- NukeDownstream
- destroyGraph
- setVerbose
- setUseCallback
- setIdealFramerate
- setAutoReconnectOnFreeze
- setupDevice
- setupDevice
- setupDevice
- setupDeviceFourcc
- setupDevice
- setFormat
- getDeviceName
- listDevices
- getWidth
- getHeight
- getFourcc
- getFPS
- getSize
- getPixels
- getPixels
- isFrameNew
- isDeviceSetup
- basicThread
- showSettingsWindow
- getVideoSettingFilter
- setVideoSettingFilterPct
- setVideoSettingFilter
- setVideoSettingCameraPct
- setVideoSettingCamera
- getVideoSettingCamera
- stopDevice
- restartDevice
- comInit
- comUnInit
- setAttemptCaptureSize
- setPhyCon
- setup
- processPixels
- getMediaSubtypeAsString
- getFourccFromMediaSubtype
- getMediaSubtypeFromFourcc
- getVideoPropertyAsString
- getVideoPropertyFromCV
- getCameraPropertyFromCV
- getCameraPropertyAsString
- findClosestSizeAndSubtype
- setSizeAndSubtype
- start
- getDeviceCount
- getDevice
- ShowFilterPropertyPages
- ShowStreamPropertyPages
- SaveGraphFile
- routeCrossbar
- m_heightSet
- getProperty
- setProperty
- grabFrame
- retrieveFrame
- getCaptureDomain
- isOpened
- open
- close
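// DirectShow capture backend for OpenCV (Windows only, guarded by HAVE_DSHOW).
// Orientation note: the videoInput/videoDevice classes below build one DirectShow
// filter graph per camera (capture source -> SampleGrabber -> NullRenderer) and hand
// frames back either through SampleGrabberCallback or by polling
// ISampleGrabber::GetCurrentBuffer().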
#include "precomp.hpp"
#if (defined WIN32 || defined _WIN32) && defined HAVE_DSHOW
#include "cap_dshow.hpp"
#if defined _MSC_VER && _MSC_VER >= 100
#pragma warning(disable: 4995)
#endif
#include <tchar.h>
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <wchar.h>
#include <vector>
#if defined _MSC_VER && _MSC_VER >= 1500
# include "DShow.h"
# include "strmif.h"
# include "Aviriff.h"
# include "dvdmedia.h"
# include "bdaiface.h"
#else
# ifdef _MSC_VER
# define __extension__
typedef BOOL WINBOOL;
#endif
#include "dshow/dshow.h"
#include "dshow/dvdmedia.h"
#include "dshow/bdatypes.h"
interface IEnumPIDMap : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE Next(
ULONG cRequest,
PID_MAP *pPIDMap,
ULONG *pcReceived) = 0;
virtual HRESULT STDMETHODCALLTYPE Skip(
ULONG cRecords) = 0;
virtual HRESULT STDMETHODCALLTYPE Reset( void) = 0;
virtual HRESULT STDMETHODCALLTYPE Clone(
IEnumPIDMap **ppIEnumPIDMap) = 0;
};
interface IMPEG2PIDMap : public IUnknown
{
virtual HRESULT STDMETHODCALLTYPE MapPID(
ULONG culPID,
ULONG *pulPID,
MEDIA_SAMPLE_CONTENT MediaSampleContent) = 0;
virtual HRESULT STDMETHODCALLTYPE UnmapPID(
ULONG culPID,
ULONG *pulPID) = 0;
virtual HRESULT STDMETHODCALLTYPE EnumPIDMap(
IEnumPIDMap **pIEnumPIDMap) = 0;
};
#endif
#include <process.h>
#ifndef _WIN32_WINNT
#define _WIN32_WINNT 0x400
#endif
#include <initguid.h>
DEFINE_GUID(MEDIASUBTYPE_GREY, 0x59455247, 0x0000, 0x0010, 0x80, 0x00,
0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
DEFINE_GUID(MEDIASUBTYPE_Y8, 0x20203859, 0x0000, 0x0010, 0x80, 0x00,
0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
DEFINE_GUID(MEDIASUBTYPE_Y800, 0x30303859, 0x0000, 0x0010, 0x80, 0x00,
0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
DEFINE_GUID(CLSID_CaptureGraphBuilder2,0xbf87b6e1,0x8c27,0x11d0,0xb3,0xf0,0x00,0xaa,0x00,0x37,0x61,0xc5);
DEFINE_GUID(CLSID_FilterGraph,0xe436ebb3,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(CLSID_NullRenderer,0xc1f400a4,0x3f08,0x11d3,0x9f,0x0b,0x00,0x60,0x08,0x03,0x9e,0x37);
DEFINE_GUID(CLSID_SampleGrabber,0xc1f400a0,0x3f08,0x11d3,0x9f,0x0b,0x00,0x60,0x08,0x03,0x9e,0x37);
DEFINE_GUID(CLSID_SystemDeviceEnum,0x62be5d10,0x60eb,0x11d0,0xbd,0x3b,0x00,0xa0,0xc9,0x11,0xce,0x86);
DEFINE_GUID(CLSID_VideoInputDeviceCategory,0x860bb310,0x5d01,0x11d0,0xbd,0x3b,0x00,0xa0,0xc9,0x11,0xce,0x86);
DEFINE_GUID(FORMAT_VideoInfo,0x05589f80,0xc356,0x11ce,0xbf,0x01,0x00,0xaa,0x00,0x55,0x59,0x5a);
DEFINE_GUID(IID_IAMAnalogVideoDecoder,0xc6e13350,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
DEFINE_GUID(IID_IAMCameraControl,0xc6e13370,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
DEFINE_GUID(IID_IAMCrossbar,0xc6e13380,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
DEFINE_GUID(IID_IAMStreamConfig,0xc6e13340,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
DEFINE_GUID(IID_IAMVideoProcAmp,0xc6e13360,0x30ac,0x11d0,0xa1,0x8c,0x00,0xa0,0xc9,0x11,0x89,0x56);
DEFINE_GUID(IID_IBaseFilter,0x56a86895,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(IID_ICaptureGraphBuilder2,0x93e5a4e0,0x2d50,0x11d2,0xab,0xfa,0x00,0xa0,0xc9,0xc6,0xe3,0x8d);
DEFINE_GUID(IID_ICreateDevEnum,0x29840822,0x5b84,0x11d0,0xbd,0x3b,0x00,0xa0,0xc9,0x11,0xce,0x86);
DEFINE_GUID(IID_IGraphBuilder,0x56a868a9,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(IID_IMPEG2PIDMap,0xafb6c2a1,0x2c41,0x11d3,0x8a,0x60,0x00,0x00,0xf8,0x1e,0x0e,0x4a);
DEFINE_GUID(IID_IMediaControl,0x56a868b1,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(IID_IMediaFilter,0x56a86899,0x0ad4,0x11ce,0xb0,0x3a,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(IID_ISampleGrabber,0x6b652fff,0x11fe,0x4fce,0x92,0xad,0x02,0x66,0xb5,0xd7,0xc7,0x8f);
DEFINE_GUID(LOOK_UPSTREAM_ONLY,0xac798be0,0x98e3,0x11d1,0xb3,0xf1,0x00,0xaa,0x00,0x37,0x61,0xc5);
DEFINE_GUID(MEDIASUBTYPE_AYUV,0x56555941,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_IYUV,0x56555949,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_RGB24,0xe436eb7d,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_RGB32,0xe436eb7e,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_RGB555,0xe436eb7c,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_RGB565,0xe436eb7b,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70);
DEFINE_GUID(MEDIASUBTYPE_I420,0x30323449,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_UYVY,0x59565955,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_Y211,0x31313259,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_Y411,0x31313459,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_Y41P,0x50313459,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_YUY2,0x32595559,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_YUYV,0x56595559,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_YV12,0x32315659,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_YVU9,0x39555659,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_YVYU,0x55595659,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIASUBTYPE_MJPG,0x47504A4D, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
DEFINE_GUID(MEDIATYPE_Interleaved,0x73766169,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(MEDIATYPE_Video,0x73646976,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71);
DEFINE_GUID(PIN_CATEGORY_CAPTURE,0xfb6c4281,0x0353,0x11d1,0x90,0x5f,0x00,0x00,0xc0,0xcc,0x16,0xba);
DEFINE_GUID(PIN_CATEGORY_PREVIEW,0xfb6c4282,0x0353,0x11d1,0x90,0x5f,0x00,0x00,0xc0,0xcc,0x16,0xba);
interface ISampleGrabberCB : public IUnknown
{
virtual HRESULT STDMETHODCALLTYPE SampleCB(
double SampleTime,
IMediaSample *pSample) = 0;
virtual HRESULT STDMETHODCALLTYPE BufferCB(
double SampleTime,
BYTE *pBuffer,
LONG BufferLen) = 0;
};
interface ISampleGrabber : public IUnknown
{
virtual HRESULT STDMETHODCALLTYPE SetOneShot(
BOOL OneShot) = 0;
virtual HRESULT STDMETHODCALLTYPE SetMediaType(
const AM_MEDIA_TYPE *pType) = 0;
virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType(
AM_MEDIA_TYPE *pType) = 0;
virtual HRESULT STDMETHODCALLTYPE SetBufferSamples(
BOOL BufferThem) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer(
LONG *pBufferSize,
LONG *pBuffer) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentSample(
IMediaSample **ppSample) = 0;
virtual HRESULT STDMETHODCALLTYPE SetCallback(
ISampleGrabberCB *pCallback,
LONG WhichMethodToCallback) = 0;
};
#ifndef HEADER
#define HEADER(p) (&(((VIDEOINFOHEADER*)(p))->bmiHeader))
#endif
#ifdef _DEBUG
#include <strsafe.h>
static bool gs_verbose = true;
static void DebugPrintOut(const char *format, ...)
{
if (gs_verbose)
{
va_list args;
va_start(args, format);
if( ::IsDebuggerPresent() )
{
CHAR szMsg[512];
::StringCbVPrintfA(szMsg, sizeof(szMsg), format, args);
::OutputDebugStringA(szMsg);
}
else
{
vprintf(format, args);
}
va_end (args);
}
}
#else
#define DebugPrintOut(...) void()
#endif
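// Library-wide constants: VI_MAX_CAMERAS bounds the device list, VI_NUM_TYPES and
// VI_NUM_FORMATS size the mediaSubtypes / formatTypes tables, VI_COMPOSITE..VI_1394
// name the physical connections handled by setPhyCon(), and the VI_NTSC_* / VI_PAL_* /
// VI_SECAM_* values index the analog TV formats applied by setFormat().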
#define VI_VERSION 0.1995
#define VI_MAX_CAMERAS 20
#define VI_NUM_TYPES 20
#define VI_NUM_FORMATS 18
#define VI_COMPOSITE 0
#define VI_S_VIDEO 1
#define VI_TUNER 2
#define VI_USB 3
#define VI_1394 4
#define VI_NTSC_M 0
#define VI_PAL_B 1
#define VI_PAL_D 2
#define VI_PAL_G 3
#define VI_PAL_H 4
#define VI_PAL_I 5
#define VI_PAL_M 6
#define VI_PAL_N 7
#define VI_PAL_NC 8
#define VI_SECAM_B 9
#define VI_SECAM_D 10
#define VI_SECAM_G 11
#define VI_SECAM_H 12
#define VI_SECAM_K 13
#define VI_SECAM_K1 14
#define VI_SECAM_L 15
#define VI_NTSC_M_J 16
#define VI_NTSC_433 17
struct ICaptureGraphBuilder2;
struct IGraphBuilder;
struct IBaseFilter;
struct IAMCrossbar;
struct IMediaControl;
struct ISampleGrabber;
struct IMediaEventEx;
struct IAMStreamConfig;
struct _AMMediaType;
class SampleGrabberCallback;
typedef _AMMediaType AM_MEDIA_TYPE;
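// videoDevice: state for a single capture graph - the DirectShow interface pointers,
// the negotiated width/height/media subtype, the requested ("try") settings, and the
// pixel buffers filled by the grabber.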
class videoDevice{
public:
videoDevice();
void setSize(int w, int h);
void NukeDownstream(IBaseFilter *pBF);
void destroyGraph();
~videoDevice();
int videoSize;
int width;
int height;
int tryWidth;
int tryHeight;
GUID tryVideoType;
ICaptureGraphBuilder2 *pCaptureGraph;
IGraphBuilder *pGraph;
IMediaControl *pControl;
IBaseFilter *pVideoInputFilter;
IBaseFilter *pGrabberF;
IBaseFilter * pDestFilter;
IAMStreamConfig *streamConf;
ISampleGrabber * pGrabber;
AM_MEDIA_TYPE * pAmMediaType;
IMediaEventEx * pMediaEvent;
GUID videoType;
long formatType;
SampleGrabberCallback * sgCallback;
bool tryDiffSize;
bool useCrossbar;
bool readyToCapture;
bool sizeSet;
bool setupStarted;
bool specificFormat;
bool autoReconnect;
int nFramesForReconnect;
unsigned long nFramesRunning;
int connection;
int storeConn;
int myID;
long requestedFrameTime;
char nDeviceName[255];
WCHAR wDeviceName[255];
unsigned char * pixels;
char * pBuffer;
};
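// videoInput: owns up to VI_MAX_CAMERAS videoDevice objects and exposes the public
// API: device enumeration, setup, per-frame pixel access, and IAMVideoProcAmp /
// IAMCameraControl property control. Rough usage sketch (illustrative only):
//   videoInput VI;
//   if (videoInput::listDevices() > 0 && VI.setupDevice(0, 640, 480)) {
//       unsigned char * frame = VI.getPixels(0, true, true); // swap R/B, flip vertically
//       VI.stopDevice(0);
//   }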
class videoInput{
public:
videoInput();
~videoInput();
static void setVerbose(bool _verbose);
static int listDevices(bool silent = false);
static char * getDeviceName(int deviceID);
void setUseCallback(bool useCallback);
void setIdealFramerate(int deviceID, int idealFramerate);
void setAutoReconnectOnFreeze(int deviceNumber, bool doReconnect, int numMissedFramesBeforeReconnect);
bool setupDevice(int deviceID);
bool setupDevice(int deviceID, int w, int h);
bool setupDeviceFourcc(int deviceID, int w, int h,int fourcc);
bool setupDevice(int deviceID, int connection);
bool setupDevice(int deviceID, int w, int h, int connection);
bool setFourcc(int deviceNumber, int fourcc);
bool setFormat(int deviceNumber, int format);
bool isFrameNew(int deviceID);
bool isDeviceSetup(int deviceID) const;
unsigned char * getPixels(int deviceID, bool flipRedAndBlue = true, bool flipImage = false);
bool getPixels(int id, unsigned char * pixels, bool flipRedAndBlue = true, bool flipImage = false);
void showSettingsWindow(int deviceID);
bool setVideoSettingFilter(int deviceID, long Property, long lValue, long Flags = 0, bool useDefaultValue = false);
bool setVideoSettingFilterPct(int deviceID, long Property, float pctValue, long Flags = 0);
bool getVideoSettingFilter(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue);
bool setVideoSettingCamera(int deviceID, long Property, long lValue, long Flags = 0, bool useDefaultValue = false);
bool setVideoSettingCameraPct(int deviceID, long Property, float pctValue, long Flags = 0);
bool getVideoSettingCamera(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue);
int getWidth(int deviceID) const;
int getHeight(int deviceID) const;
int getSize(int deviceID) const;
int getFourcc(int deviceID) const;
double getFPS(int deviceID) const;
void stopDevice(int deviceID);
bool restartDevice(int deviceID);
int devicesFound;
int getVideoPropertyFromCV(int cv_property);
int getCameraPropertyFromCV(int cv_property);
private:
void setPhyCon(int deviceID, int conn);
void setAttemptCaptureSize(int deviceID, int w, int h,GUID mediaType=MEDIASUBTYPE_RGB24);
bool setup(int deviceID);
void processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip);
int start(int deviceID, videoDevice * VD);
int getDeviceCount();
void getMediaSubtypeAsString(GUID type, char * typeAsString);
GUID *getMediaSubtypeFromFourcc(int fourcc);
int getFourccFromMediaSubtype(GUID type) const;
void getVideoPropertyAsString(int prop, char * propertyAsString);
void getCameraPropertyAsString(int prop, char * propertyAsString);
HRESULT getDevice(IBaseFilter **pSrcFilter, int deviceID, WCHAR * wDeviceName, char * nDeviceName);
static HRESULT ShowFilterPropertyPages(IBaseFilter *pFilter);
static HRESULT ShowStreamPropertyPages(IAMStreamConfig *pStream);
HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath);
HRESULT routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter **pVidInFilter, int conType, GUID captureMode);
static bool comInit();
static bool comUnInit();
int connection;
int callbackSetCount;
bool bCallback;
GUID CAPTURE_MODE;
GUID MEDIASUBTYPE_Y800;
GUID MEDIASUBTYPE_Y8;
GUID MEDIASUBTYPE_GREY;
videoDevice * VDList[VI_MAX_CAMERAS];
GUID mediaSubtypes[VI_NUM_TYPES];
long formatTypes[VI_NUM_FORMATS];
static void __cdecl basicThread(void * objPtr);
static char deviceNames[VI_MAX_CAMERAS][255];
};
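// MyFreeMediaType / MyDeleteMediaType: release the format block and pUnk held by an
// AM_MEDIA_TYPE and, for the pointer variant, free the structure itself (local
// stand-ins for the DirectShow FreeMediaType/DeleteMediaType helpers).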
static void MyFreeMediaType(AM_MEDIA_TYPE& mt){
if (mt.cbFormat != 0)
{
CoTaskMemFree((PVOID)mt.pbFormat);
mt.cbFormat = 0;
mt.pbFormat = NULL;
}
if (mt.pUnk != NULL)
{
mt.pUnk->Release();
mt.pUnk = NULL;
}
}
static void MyDeleteMediaType(AM_MEDIA_TYPE *pmt)
{
if (pmt != NULL)
{
MyFreeMediaType(*pmt);
CoTaskMemFree(pmt);
}
}
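// SampleGrabberCallback: ISampleGrabberCB implementation used in callback mode.
// SampleCB() runs on the graph's streaming thread; when the sample size matches the
// expected buffer it copies the frame into 'pixels' under critSection, sets newFrame
// and signals hEvent so getPixels() can pick it up. BufferCB() is intentionally unused.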
class SampleGrabberCallback : public ISampleGrabberCB{
public:
SampleGrabberCallback(){
InitializeCriticalSection(&critSection);
freezeCheck = 0;
bufferSetup = false;
newFrame = false;
latestBufferLength = 0;
hEvent = CreateEvent(NULL, true, false, NULL);
}
virtual ~SampleGrabberCallback(){
ptrBuffer = NULL;
DeleteCriticalSection(&critSection);
CloseHandle(hEvent);
if(bufferSetup){
delete[] pixels;
}
}
bool setupBuffer(int numBytesIn){
if(bufferSetup){
return false;
}else{
numBytes = numBytesIn;
pixels = new unsigned char[numBytes];
bufferSetup = true;
newFrame = false;
latestBufferLength = 0;
}
return true;
}
STDMETHODIMP_(ULONG) AddRef() { return 1; }
STDMETHODIMP_(ULONG) Release() { return 2; }
STDMETHODIMP QueryInterface(REFIID, void **ppvObject){
*ppvObject = static_cast<ISampleGrabberCB*>(this);
return S_OK;
}
STDMETHODIMP SampleCB(double , IMediaSample *pSample){
if(WaitForSingleObject(hEvent, 0) == WAIT_OBJECT_0) return S_OK;
HRESULT hr = pSample->GetPointer(&ptrBuffer);
if(hr == S_OK){
latestBufferLength = pSample->GetActualDataLength();
if(latestBufferLength == numBytes){
EnterCriticalSection(&critSection);
memcpy(pixels, ptrBuffer, latestBufferLength);
newFrame = true;
freezeCheck = 1;
LeaveCriticalSection(&critSection);
SetEvent(hEvent);
}else{
DebugPrintOut("ERROR: SampleCB() - buffer sizes do not match\n");
}
}
return S_OK;
}
STDMETHODIMP BufferCB(double, BYTE *, long){
return E_NOTIMPL;
}
int freezeCheck;
int latestBufferLength;
int numBytes;
bool newFrame;
bool bufferSetup;
unsigned char * pixels;
unsigned char * ptrBuffer;
CRITICAL_SECTION critSection;
HANDLE hEvent;
};
videoDevice::videoDevice(){
pCaptureGraph = NULL;
pGraph = NULL;
pControl = NULL;
pVideoInputFilter = NULL;
pGrabber = NULL;
pDestFilter = NULL;
pGrabberF = NULL;
pMediaEvent = NULL;
streamConf = NULL;
pAmMediaType = NULL;
sgCallback = new SampleGrabberCallback();
sgCallback->newFrame = false;
videoType = MEDIASUBTYPE_RGB24;
connection = PhysConn_Video_Composite;
storeConn = 0;
videoSize = 0;
width = 0;
height = 0;
tryWidth = 640;
tryHeight = 480;
tryVideoType = MEDIASUBTYPE_RGB24;
nFramesForReconnect= 10000;
nFramesRunning = 0;
myID = -1;
tryDiffSize = true;
useCrossbar = false;
readyToCapture = false;
sizeSet = false;
setupStarted = false;
specificFormat = false;
autoReconnect = false;
requestedFrameTime = -1;
memset(wDeviceName, 0, sizeof(WCHAR) * 255);
memset(nDeviceName, 0, sizeof(char) * 255);
}
void videoDevice::setSize(int w, int h){
if(sizeSet){
DebugPrintOut("SETUP: Error device size should not be set more than once\n");
}
else
{
width = w;
height = h;
videoSize = w*h*3;
sizeSet = true;
pixels = new unsigned char[videoSize];
pBuffer = new char[videoSize];
memset(pixels, 0 , videoSize);
sgCallback->setupBuffer(videoSize);
}
}
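// Recursively disconnect and remove from the graph every filter connected
// downstream of pBF's output pins.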
void videoDevice::NukeDownstream(IBaseFilter *pBF){
IPin *pP, *pTo;
ULONG u;
IEnumPins *pins = NULL;
PIN_INFO pininfo;
HRESULT hr = pBF->EnumPins(&pins);
pins->Reset();
while (hr == NOERROR)
{
hr = pins->Next(1, &pP, &u);
if (hr == S_OK && pP)
{
pP->ConnectedTo(&pTo);
if (pTo)
{
hr = pTo->QueryPinInfo(&pininfo);
if (hr == NOERROR)
{
if (pininfo.dir == PINDIR_INPUT)
{
NukeDownstream(pininfo.pFilter);
pGraph->Disconnect(pTo);
pGraph->Disconnect(pP);
pGraph->RemoveFilter(pininfo.pFilter);
}
pininfo.pFilter->Release();
pininfo.pFilter = NULL;
}
pTo->Release();
}
pP->Release();
}
}
if (pins) pins->Release();
}
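// Strip the graph: repeatedly take the first remaining filter, log its name and
// remove it, until EnumFilters() yields nothing more.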
void videoDevice::destroyGraph(){
HRESULT hr = 0;
int i = 0;
while (hr == NOERROR)
{
IEnumFilters * pEnum = 0;
ULONG cFetched;
hr = pGraph->EnumFilters(&pEnum);
if (FAILED(hr)) { DebugPrintOut("SETUP: pGraph->EnumFilters() failed.\n"); return; }
IBaseFilter * pFilter = NULL;
if (pEnum->Next(1, &pFilter, &cFetched) == S_OK)
{
FILTER_INFO FilterInfo;
memset(&FilterInfo, 0, sizeof(FilterInfo));
hr = pFilter->QueryFilterInfo(&FilterInfo);
FilterInfo.pGraph->Release();
int count = 0;
char buffer[255];
memset(buffer, 0, 255 * sizeof(char));
while( FilterInfo.achName[count] != 0x00 )
{
buffer[count] = (char)FilterInfo.achName[count];
count++;
}
DebugPrintOut("SETUP: removing filter %s...\n", buffer);
hr = pGraph->RemoveFilter(pFilter);
if (FAILED(hr)) { DebugPrintOut("SETUP: pGraph->RemoveFilter() failed.\n"); return; }
DebugPrintOut("SETUP: filter removed %s\n",buffer);
pFilter->Release();
pFilter = NULL;
}
else break;
pEnum->Release();
pEnum = NULL;
i++;
}
return;
}
videoDevice::~videoDevice(){
if(setupStarted){ DebugPrintOut("\nSETUP: Disconnecting device %i\n", myID); }
else{
if(sgCallback){
sgCallback->Release();
delete sgCallback;
}
return;
}
HRESULT HR = NOERROR;
if( (sgCallback) && (pGrabber) )
{
pGrabber->SetCallback(NULL, 1);
DebugPrintOut("SETUP: freeing Grabber Callback\n");
sgCallback->Release();
if(sizeSet){
delete[] pixels;
delete[] pBuffer;
}
delete sgCallback;
}
if( (pControl) )
{
HR = pControl->Pause();
if (FAILED(HR)) DebugPrintOut("ERROR - Could not pause pControl\n");
HR = pControl->Stop();
if (FAILED(HR)) DebugPrintOut("ERROR - Could not stop pControl\n");
}
if( (pVideoInputFilter) )NukeDownstream(pVideoInputFilter);
if( (pDestFilter) ){ DebugPrintOut("SETUP: freeing Renderer\n");
(pDestFilter)->Release();
(pDestFilter) = 0;
}
if( (pVideoInputFilter) ){ DebugPrintOut("SETUP: freeing Capture Source\n");
(pVideoInputFilter)->Release();
(pVideoInputFilter) = 0;
}
if( (pGrabberF) ){ DebugPrintOut("SETUP: freeing Grabber Filter\n");
(pGrabberF)->Release();
(pGrabberF) = 0;
}
if( (pGrabber) ){ DebugPrintOut("SETUP: freeing Grabber\n");
(pGrabber)->Release();
(pGrabber) = 0;
}
if( (pControl) ){ DebugPrintOut("SETUP: freeing Control\n");
(pControl)->Release();
(pControl) = 0;
}
if( (pMediaEvent) ){ DebugPrintOut("SETUP: freeing Media Event\n");
(pMediaEvent)->Release();
(pMediaEvent) = 0;
}
if( (streamConf) ){ DebugPrintOut("SETUP: freeing Stream\n");
(streamConf)->Release();
(streamConf) = 0;
}
if( (pAmMediaType) ){ DebugPrintOut("SETUP: freeing Media Type\n");
MyDeleteMediaType(pAmMediaType);
}
if((pMediaEvent)){
DebugPrintOut("SETUP: freeing Media Event\n");
(pMediaEvent)->Release();
(pMediaEvent) = 0;
}
if( (pGraph) )destroyGraph();
if( (pCaptureGraph) ){ DebugPrintOut("SETUP: freeing Capture Graph\n");
(pCaptureGraph)->Release();
(pCaptureGraph) = 0;
}
if( (pGraph) ){ DebugPrintOut("SETUP: freeing Main Graph\n");
(pGraph)->Release();
(pGraph) = 0;
}
delete pDestFilter;
delete pVideoInputFilter;
delete pGrabberF;
delete pGrabber;
delete pControl;
delete streamConf;
delete pMediaEvent;
delete pCaptureGraph;
delete pGraph;
DebugPrintOut("SETUP: Device %i disconnected and freed\n\n",myID);
}
videoInput::videoInput(){
comInit();
devicesFound = 0;
callbackSetCount = 0;
bCallback = true;
for(int i=0; i<VI_MAX_CAMERAS; i++) VDList[i] = new videoDevice();
DebugPrintOut("\n***** VIDEOINPUT LIBRARY - %2.04f - TFW07 *****\n\n",VI_VERSION);
mediaSubtypes[0] = MEDIASUBTYPE_RGB24;
mediaSubtypes[1] = MEDIASUBTYPE_RGB32;
mediaSubtypes[2] = MEDIASUBTYPE_RGB555;
mediaSubtypes[3] = MEDIASUBTYPE_RGB565;
mediaSubtypes[4] = MEDIASUBTYPE_YUY2;
mediaSubtypes[5] = MEDIASUBTYPE_YVYU;
mediaSubtypes[6] = MEDIASUBTYPE_YUYV;
mediaSubtypes[7] = MEDIASUBTYPE_IYUV;
mediaSubtypes[8] = MEDIASUBTYPE_UYVY;
mediaSubtypes[9] = MEDIASUBTYPE_YV12;
mediaSubtypes[10] = MEDIASUBTYPE_YVU9;
mediaSubtypes[11] = MEDIASUBTYPE_Y411;
mediaSubtypes[12] = MEDIASUBTYPE_Y41P;
mediaSubtypes[13] = MEDIASUBTYPE_Y211;
mediaSubtypes[14] = MEDIASUBTYPE_AYUV;
mediaSubtypes[15] = MEDIASUBTYPE_MJPG;
mediaSubtypes[16] = MEDIASUBTYPE_Y800;
mediaSubtypes[17] = MEDIASUBTYPE_Y8;
mediaSubtypes[18] = MEDIASUBTYPE_GREY;
mediaSubtypes[19] = MEDIASUBTYPE_I420;
formatTypes[VI_NTSC_M] = AnalogVideo_NTSC_M;
formatTypes[VI_NTSC_M_J] = AnalogVideo_NTSC_M_J;
formatTypes[VI_NTSC_433] = AnalogVideo_NTSC_433;
formatTypes[VI_PAL_B] = AnalogVideo_PAL_B;
formatTypes[VI_PAL_D] = AnalogVideo_PAL_D;
formatTypes[VI_PAL_G] = AnalogVideo_PAL_G;
formatTypes[VI_PAL_H] = AnalogVideo_PAL_H;
formatTypes[VI_PAL_I] = AnalogVideo_PAL_I;
formatTypes[VI_PAL_M] = AnalogVideo_PAL_M;
formatTypes[VI_PAL_N] = AnalogVideo_PAL_N;
formatTypes[VI_PAL_NC] = AnalogVideo_PAL_N_COMBO;
formatTypes[VI_SECAM_B] = AnalogVideo_SECAM_B;
formatTypes[VI_SECAM_D] = AnalogVideo_SECAM_D;
formatTypes[VI_SECAM_G] = AnalogVideo_SECAM_G;
formatTypes[VI_SECAM_H] = AnalogVideo_SECAM_H;
formatTypes[VI_SECAM_K] = AnalogVideo_SECAM_K;
formatTypes[VI_SECAM_K1] = AnalogVideo_SECAM_K1;
formatTypes[VI_SECAM_L] = AnalogVideo_SECAM_L;
}
void videoInput::setVerbose(bool _verbose){
#ifdef _DEBUG
gs_verbose = _verbose;
#else
(void)_verbose;
#endif
}
void videoInput::setUseCallback(bool useCallback){
if(callbackSetCount == 0){
bCallback = useCallback;
callbackSetCount = 1;
}else{
DebugPrintOut("ERROR: setUseCallback can only be called before setup\n");
}
}
void videoInput::setIdealFramerate(int deviceNumber, int idealFramerate){
if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return;
if( idealFramerate > 0 ){
VDList[deviceNumber]->requestedFrameTime = (unsigned long)(10000000 / idealFramerate);
}
}
void videoInput::setAutoReconnectOnFreeze(int deviceNumber, bool doReconnect, int numMissedFramesBeforeReconnect){
if(deviceNumber >= VI_MAX_CAMERAS) return;
VDList[deviceNumber]->autoReconnect = doReconnect;
VDList[deviceNumber]->nFramesForReconnect = numMissedFramesBeforeReconnect;
}
bool videoInput::setupDevice(int deviceNumber){
if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
if(setup(deviceNumber))return true;
return false;
}
bool videoInput::setupDevice(int deviceNumber, int _connection){
if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
setPhyCon(deviceNumber, _connection);
if(setup(deviceNumber))return true;
return false;
}
bool videoInput::setupDevice(int deviceNumber, int w, int h){
if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
setAttemptCaptureSize(deviceNumber,w,h);
if(setup(deviceNumber))return true;
return false;
}
bool videoInput::setupDeviceFourcc(int deviceNumber, int w, int h,int fourcc){
if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
if ( fourcc != -1 ) {
GUID *mediaType = getMediaSubtypeFromFourcc(fourcc);
if ( mediaType ) {
setAttemptCaptureSize(deviceNumber,w,h,*mediaType);
}
} else {
setAttemptCaptureSize(deviceNumber,w,h);
}
if(setup(deviceNumber))return true;
return false;
}
bool videoInput::setupDevice(int deviceNumber, int w, int h, int _connection){
if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
setAttemptCaptureSize(deviceNumber,w,h);
setPhyCon(deviceNumber, _connection);
if(setup(deviceNumber))return true;
return false;
}
bool videoInput::setFormat(int deviceNumber, int format){
if(deviceNumber >= VI_MAX_CAMERAS || !VDList[deviceNumber]->readyToCapture) return false;
bool returnVal = false;
if(format >= 0 && format < VI_NUM_FORMATS){
VDList[deviceNumber]->formatType = formatTypes[format];
VDList[deviceNumber]->specificFormat = true;
if(VDList[deviceNumber]->specificFormat){
HRESULT hr = getDevice(&VDList[deviceNumber]->pVideoInputFilter, deviceNumber, VDList[deviceNumber]->wDeviceName, VDList[deviceNumber]->nDeviceName);
if(hr != S_OK){
return false;
}
IAMAnalogVideoDecoder *pVideoDec = NULL;
hr = VDList[deviceNumber]->pCaptureGraph->FindInterface(NULL, &MEDIATYPE_Video, VDList[deviceNumber]->pVideoInputFilter, IID_IAMAnalogVideoDecoder, (void **)&pVideoDec);
if(VDList[deviceNumber]->pVideoInputFilter)VDList[deviceNumber]->pVideoInputFilter->Release();
if(VDList[deviceNumber]->pVideoInputFilter)VDList[deviceNumber]->pVideoInputFilter = NULL;
if(FAILED(hr)){
DebugPrintOut("SETUP: couldn't set requested format\n");
}else{
long lValue = 0;
hr = pVideoDec->get_AvailableTVFormats(&lValue);
if( SUCCEEDED(hr) && (lValue & VDList[deviceNumber]->formatType) )
{
hr = pVideoDec->put_TVFormat(VDList[deviceNumber]->formatType);
if( FAILED(hr) ){
DebugPrintOut("SETUP: couldn't set requested format\n");
}else{
returnVal = true;
}
}
pVideoDec->Release();
pVideoDec = NULL;
}
}
}
return returnVal;
}
char videoInput::deviceNames[VI_MAX_CAMERAS][255]={{0}};
char * videoInput::getDeviceName(int deviceID){
if( deviceID >= VI_MAX_CAMERAS ){
return NULL;
}
return deviceNames[deviceID];
}
int videoInput::listDevices(bool silent){
comInit();
if(!silent) DebugPrintOut("\nVIDEOINPUT SPY MODE!\n\n");
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
int deviceCounter = 0;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
reinterpret_cast<void**>(&pDevEnum));
if (SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(
CLSID_VideoInputDeviceCategory,
&pEnum, 0);
if(hr == S_OK){
if(!silent) DebugPrintOut("SETUP: Looking For Capture Devices\n");
IMoniker *pMoniker = NULL;
while (pEnum->Next(1, &pMoniker, NULL) == S_OK){
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void**)(&pPropBag));
if (FAILED(hr)){
pMoniker->Release();
continue;
}
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"Description", &varName, 0);
if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &varName, 0);
if (SUCCEEDED(hr)){
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
int count = 0;
int maxLen = sizeof(deviceNames[0])/sizeof(deviceNames[0][0]) - 2;
while( varName.bstrVal[count] != 0x00 && count < maxLen) {
deviceNames[deviceCounter][count] = (char)varName.bstrVal[count];
count++;
}
deviceNames[deviceCounter][count] = 0;
if(!silent) DebugPrintOut("SETUP: %i) %s\n",deviceCounter, deviceNames[deviceCounter]);
}
pPropBag->Release();
pPropBag = NULL;
pMoniker->Release();
pMoniker = NULL;
deviceCounter++;
}
pDevEnum->Release();
pDevEnum = NULL;
pEnum->Release();
pEnum = NULL;
}
if(!silent) DebugPrintOut("SETUP: %i Device(s) found\n\n", deviceCounter);
}
comUnInit();
return deviceCounter;
}
int videoInput::getWidth(int id) const
{
if(isDeviceSetup(id))
{
return VDList[id] ->width;
}
return 0;
}
int videoInput::getHeight(int id) const
{
if(isDeviceSetup(id))
{
return VDList[id] ->height;
}
return 0;
}
int videoInput::getFourcc(int id) const
{
if(isDeviceSetup(id))
{
return getFourccFromMediaSubtype(VDList[id]->videoType);
}
return 0;
}
double videoInput::getFPS(int id) const
{
if(isDeviceSetup(id))
{
double frameTime= VDList[id]->requestedFrameTime;
if (frameTime>0) {
return (10000000.0 / frameTime);
}
}
return 0;
}
int videoInput::getSize(int id) const
{
if(isDeviceSetup(id))
{
return VDList[id] ->videoSize;
}
return 0;
}
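// Copy the newest frame of device 'id' into dstBuffer. Callback mode waits (up to
// 1 second) on the grabber callback's event and copies under its critical section;
// non-callback mode polls ISampleGrabber::GetCurrentBuffer(). processPixels() applies
// the optional red/blue swap and vertical flip.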
bool videoInput::getPixels(int id, unsigned char * dstBuffer, bool flipRedAndBlue, bool flipImage){
bool success = false;
if(isDeviceSetup(id)){
if(bCallback){
DWORD result = WaitForSingleObject(VDList[id]->sgCallback->hEvent, 1000);
if( result != WAIT_OBJECT_0) return false;
EnterCriticalSection(&VDList[id]->sgCallback->critSection);
unsigned char * src = VDList[id]->sgCallback->pixels;
unsigned char * dst = dstBuffer;
int height = VDList[id]->height;
int width = VDList[id]->width;
processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
VDList[id]->sgCallback->newFrame = false;
LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
ResetEvent(VDList[id]->sgCallback->hEvent);
success = true;
}
else{
long bufferSize = VDList[id]->videoSize;
HRESULT hr = VDList[id]->pGrabber->GetCurrentBuffer(&bufferSize, (long *)VDList[id]->pBuffer);
if(hr==S_OK){
int numBytes = VDList[id]->videoSize;
if (numBytes == bufferSize){
unsigned char * src = (unsigned char * )VDList[id]->pBuffer;
unsigned char * dst = dstBuffer;
int height = VDList[id]->height;
int width = VDList[id]->width;
processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
success = true;
}else{
DebugPrintOut("ERROR: GetPixels() - bufferSizes do not match!\n");
}
}else{
DebugPrintOut("ERROR: GetPixels() - Unable to grab frame for device %i\n", id);
}
}
}
return success;
}
unsigned char * videoInput::getPixels(int id, bool flipRedAndBlue, bool flipImage){
if(isDeviceSetup(id)){
getPixels(id, VDList[id]->pixels, flipRedAndBlue, flipImage);
}
return VDList[id]->pixels;
}
bool videoInput::isFrameNew(int id){
if(!isDeviceSetup(id)) return false;
if(!bCallback)return true;
bool result = false;
bool freeze = false;
EnterCriticalSection(&VDList[id]->sgCallback->critSection);
result = VDList[id]->sgCallback->newFrame;
if(VDList[id]->nFramesRunning > 400 && VDList[id]->sgCallback->freezeCheck > VDList[id]->nFramesForReconnect ){
freeze = true;
}
VDList[id]->sgCallback->freezeCheck++;
LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
VDList[id]->nFramesRunning++;
if(freeze && VDList[id]->autoReconnect){
DebugPrintOut("ERROR: Device seems frozen - attempting to reconnect\n");
if( !restartDevice(VDList[id]->myID) ){
DebugPrintOut("ERROR: Unable to reconnect to device\n");
}else{
DebugPrintOut("SUCCESS: Able to reconnect to device\n");
}
}
return result;
}
bool videoInput::isDeviceSetup(int id) const
{
if(id>=0 && id<devicesFound && VDList[id]->readyToCapture)return true;
else return false;
}
void __cdecl videoInput::basicThread(void * objPtr){
videoDevice * vd = *( (videoDevice **)(objPtr) );
ShowFilterPropertyPages(vd->pVideoInputFilter);
if(vd->pVideoInputFilter)vd->pVideoInputFilter->Release();
if(vd->pVideoInputFilter)vd->pVideoInputFilter = NULL;
return;
}
void videoInput::showSettingsWindow(int id){
if(isDeviceSetup(id)){
HRESULT hr = getDevice(&VDList[id]->pVideoInputFilter, id, VDList[id]->wDeviceName, VDList[id]->nDeviceName);
if(hr == S_OK){
_beginthread(basicThread, 0, (void *)&VDList[id]);
}
}
}
bool videoInput::getVideoSettingFilter(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue){
if( !isDeviceSetup(deviceID) )return false;
HRESULT hr;
videoDevice * VD = VDList[deviceID];
hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
if (FAILED(hr)){
DebugPrintOut("setVideoSetting - getDevice Error\n");
return false;
}
IAMVideoProcAmp *pAMVideoProcAmp = NULL;
hr = VD->pVideoInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pAMVideoProcAmp);
if(FAILED(hr)){
DebugPrintOut("setVideoSetting - QueryInterface Error\n");
if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
return false;
}
char propStr[16];
getVideoPropertyAsString(Property,propStr);
DebugPrintOut("Setting video setting %s.\n", propStr);
pAMVideoProcAmp->GetRange(Property, &min, &max, &SteppingDelta, &defaultValue, &flags);
DebugPrintOut("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, min, max, SteppingDelta, defaultValue, flags);
pAMVideoProcAmp->Get(Property, &currentValue, &flags);
if(pAMVideoProcAmp)pAMVideoProcAmp->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
return true;
}
bool videoInput::setVideoSettingFilterPct(int deviceID, long Property, float pctValue, long Flags){
if( !isDeviceSetup(deviceID) )return false;
long min, max, currentValue, flags, defaultValue, stepAmnt;
if( !getVideoSettingFilter(deviceID, Property, min, max, stepAmnt, currentValue, flags, defaultValue) )return false;
if(pctValue > 1.0)pctValue = 1.0;
else if(pctValue < 0)pctValue = 0.0;
float range = (float)max - (float)min;
if(range <= 0)return false;
if(stepAmnt == 0) return false;
long value = (long)( (float)min + range * pctValue );
long rasterValue = value;
if( range == stepAmnt ){
if( pctValue < 0.5)rasterValue = min;
else rasterValue = max;
}else{
long mod = value % stepAmnt;
float halfStep = (float)stepAmnt * 0.5f;
if( mod < halfStep ) rasterValue -= mod;
else rasterValue += stepAmnt - mod;
DebugPrintOut("RASTER - pctValue is %f - value is %li - step is %li - mod is %li - rasterValue is %li\n", pctValue, value, stepAmnt, mod, rasterValue);
}
return setVideoSettingFilter(deviceID, Property, rasterValue, Flags, false);
}
bool videoInput::setVideoSettingFilter(int deviceID, long Property, long lValue, long Flags, bool useDefaultValue){
if( !isDeviceSetup(deviceID) )return false;
HRESULT hr;
char propStr[16];
getVideoPropertyAsString(Property,propStr);
videoDevice * VD = VDList[deviceID];
hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
if (FAILED(hr)){
DebugPrintOut("setVideoSetting - getDevice Error\n");
return false;
}
IAMVideoProcAmp *pAMVideoProcAmp = NULL;
hr = VD->pVideoInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pAMVideoProcAmp);
if(FAILED(hr)){
DebugPrintOut("setVideoSetting - QueryInterface Error\n");
if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
return false;
}
DebugPrintOut("Setting video setting %s.\n", propStr);
long CurrVal, Min, Max, SteppingDelta, Default, CapsFlags, AvailableCapsFlags = 0;
pAMVideoProcAmp->GetRange(Property, &Min, &Max, &SteppingDelta, &Default, &AvailableCapsFlags);
DebugPrintOut("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, Min, Max, SteppingDelta, Default, AvailableCapsFlags);
pAMVideoProcAmp->Get(Property, &CurrVal, &CapsFlags);
DebugPrintOut("Current value: %ld Flags %ld (%s)\n", CurrVal, CapsFlags, (CapsFlags == 1 ? "Auto" : (CapsFlags == 2 ? "Manual" : "Unknown")));
if (useDefaultValue) {
pAMVideoProcAmp->Set(Property, Default, VideoProcAmp_Flags_Auto);
}
else{
pAMVideoProcAmp->Set(Property, lValue, Flags);
}
if(pAMVideoProcAmp)pAMVideoProcAmp->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
return true;
}
bool videoInput::setVideoSettingCameraPct(int deviceID, long Property, float pctValue, long Flags){
if( !isDeviceSetup(deviceID) )return false;
long min, max, currentValue, flags, defaultValue, stepAmnt;
if( !getVideoSettingCamera(deviceID, Property, min, max, stepAmnt, currentValue, flags, defaultValue) )return false;
if(pctValue > 1.0)pctValue = 1.0;
else if(pctValue < 0)pctValue = 0.0;
float range = (float)max - (float)min;
if(range <= 0)return false;
if(stepAmnt == 0) return false;
long value = (long)( (float)min + range * pctValue );
long rasterValue = value;
if( range == stepAmnt ){
if( pctValue < 0.5)rasterValue = min;
else rasterValue = max;
}else{
long mod = value % stepAmnt;
float halfStep = (float)stepAmnt * 0.5f;
if( mod < halfStep ) rasterValue -= mod;
else rasterValue += stepAmnt - mod;
DebugPrintOut("RASTER - pctValue is %f - value is %li - step is %li - mod is %li - rasterValue is %li\n", pctValue, value, stepAmnt, mod, rasterValue);
}
return setVideoSettingCamera(deviceID, Property, rasterValue, Flags, false);
}
bool videoInput::setVideoSettingCamera(int deviceID, long Property, long lValue, long Flags, bool useDefaultValue){
IAMCameraControl *pIAMCameraControl;
if(isDeviceSetup(deviceID))
{
HRESULT hr;
hr = getDevice(&VDList[deviceID]->pVideoInputFilter, deviceID, VDList[deviceID]->wDeviceName, VDList[deviceID]->nDeviceName);
char propStr[16];
getCameraPropertyAsString(Property,propStr);
DebugPrintOut("Setting video setting %s.\n", propStr);
hr = VDList[deviceID]->pVideoInputFilter->QueryInterface(IID_IAMCameraControl, (void**)&pIAMCameraControl);
if (FAILED(hr)) {
DebugPrintOut("Error\n");
if(VDList[deviceID]->pVideoInputFilter)VDList[deviceID]->pVideoInputFilter->Release();
if(VDList[deviceID]->pVideoInputFilter)VDList[deviceID]->pVideoInputFilter = NULL;
return false;
}
else
{
long CurrVal, Min, Max, SteppingDelta, Default, CapsFlags, AvailableCapsFlags;
pIAMCameraControl->GetRange(Property, &Min, &Max, &SteppingDelta, &Default, &AvailableCapsFlags);
DebugPrintOut("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, Min, Max, SteppingDelta, Default, AvailableCapsFlags);
pIAMCameraControl->Get(Property, &CurrVal, &CapsFlags);
DebugPrintOut("Current value: %ld Flags %ld (%s)\n", CurrVal, CapsFlags, (CapsFlags == 1 ? "Auto" : (CapsFlags == 2 ? "Manual" : "Unknown")));
if (useDefaultValue) {
pIAMCameraControl->Set(Property, Default, CameraControl_Flags_Auto);
}
else
{
pIAMCameraControl->Set(Property, lValue, Flags);
}
pIAMCameraControl->Release();
if(VDList[deviceID]->pVideoInputFilter)VDList[deviceID]->pVideoInputFilter->Release();
if(VDList[deviceID]->pVideoInputFilter)VDList[deviceID]->pVideoInputFilter = NULL;
return true;
}
}
return false;
}
bool videoInput::getVideoSettingCamera(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue){
if( !isDeviceSetup(deviceID) )return false;
HRESULT hr;
videoDevice * VD = VDList[deviceID];
hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
if (FAILED(hr)){
DebugPrintOut("setVideoSetting - getDevice Error\n");
return false;
}
IAMCameraControl *pIAMCameraControl = NULL;
hr = VD->pVideoInputFilter->QueryInterface(IID_IAMCameraControl, (void**)&pIAMCameraControl);
if(FAILED(hr)){
DebugPrintOut("setVideoSetting - QueryInterface Error\n");
if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
return false;
}
char propStr[16];
getCameraPropertyAsString(Property,propStr);
DebugPrintOut("Setting video setting %s.\n", propStr);
pIAMCameraControl->GetRange(Property, &min, &max, &SteppingDelta, &defaultValue, &flags);
DebugPrintOut("Range for video setting %s: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", propStr, min, max, SteppingDelta, defaultValue, flags);
pIAMCameraControl->Get(Property, &currentValue, &flags);
if(pIAMCameraControl)pIAMCameraControl->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
return true;
}
void videoInput::stopDevice(int id){
if(id < VI_MAX_CAMERAS)
{
delete VDList[id];
VDList[id] = new videoDevice();
}
}
bool videoInput::restartDevice(int id){
if(isDeviceSetup(id))
{
int conn = VDList[id]->storeConn;
int tmpW = VDList[id]->width;
int tmpH = VDList[id]->height;
bool bFormat = VDList[id]->specificFormat;
long format = VDList[id]->formatType;
int nReconnect = VDList[id]->nFramesForReconnect;
bool bReconnect = VDList[id]->autoReconnect;
unsigned long avgFrameTime = VDList[id]->requestedFrameTime;
stopDevice(id);
if( avgFrameTime != (unsigned long)-1){
VDList[id]->requestedFrameTime = avgFrameTime;
}
if( setupDevice(id, tmpW, tmpH, conn) ){
if( bFormat ){
setFormat(id, format);
}
if( bReconnect ){
setAutoReconnectOnFreeze(id, true, nReconnect);
}
return true;
}
}
return false;
}
videoInput::~videoInput(){
for(int i = 0; i < VI_MAX_CAMERAS; i++)
{
delete VDList[i];
}
comUnInit();
}
bool videoInput::comInit(){
return true;
}
bool videoInput::comUnInit(){
return true;
}
void videoInput::setAttemptCaptureSize(int id, int w, int h,GUID mediaType){
VDList[id]->tryWidth = w;
VDList[id]->tryHeight = h;
VDList[id]->tryDiffSize = true;
VDList[id]->tryVideoType = mediaType;
}
void videoInput::setPhyCon(int id, int conn){
switch(conn){
case 0:
VDList[id]->connection = PhysConn_Video_Composite;
break;
case 1:
VDList[id]->connection = PhysConn_Video_SVideo;
break;
case 2:
VDList[id]->connection = PhysConn_Video_Tuner;
break;
case 3:
VDList[id]->connection = PhysConn_Video_USB;
break;
case 4:
VDList[id]->connection = PhysConn_Video_1394;
break;
default:
return;
break;
}
VDList[id]->storeConn = conn;
VDList[id]->useCrossbar = true;
}
bool videoInput::setup(int deviceNumber){
devicesFound = getDeviceCount();
if(deviceNumber>devicesFound-1)
{
DebugPrintOut("SETUP: device[%i] not found - you have %i devices available\n", deviceNumber, devicesFound);
if(devicesFound>=0) DebugPrintOut("SETUP: this means that the last device you can use is device[%i]\n", devicesFound-1);
return false;
}
if(VDList[deviceNumber]->readyToCapture)
{
DebugPrintOut("SETUP: can't setup, device %i is currently being used\n",VDList[deviceNumber]->myID);
return false;
}
HRESULT hr = start(deviceNumber, VDList[deviceNumber]);
if(hr == S_OK)return true;
else return false;
}
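// Repack a 24-bit frame from src to dst: plain memcpy when no channel swap is
// requested, otherwise swap bytes 0 and 2 of every pixel, with an optional
// bottom-up (vertical) flip in either path.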
void videoInput::processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip){
int widthInBytes = width * 3;
int numBytes = widthInBytes * height;
if(!bRGB){
if(bFlip){
for(int y = 0; y < height; y++){
memcpy(dst + (y * widthInBytes), src + ( (height -y -1) * widthInBytes), widthInBytes);
}
}else{
memcpy(dst, src, numBytes);
}
}else{
if(bFlip){
int x = 0;
int y = (height - 1) * widthInBytes;
src += y;
for(int i = 0; i < numBytes; i+=3){
if(x >= width){
x = 0;
src -= widthInBytes*2;
}
*dst = *(src+2);
dst++;
*dst = *(src+1);
dst++;
*dst = *src;
dst++;
src+=3;
x++;
}
}
else{
for(int i = 0; i < numBytes; i+=3){
*dst = *(src+2);
dst++;
*dst = *(src+1);
dst++;
*dst = *src;
dst++;
src+=3;
}
}
}
}
void videoInput::getMediaSubtypeAsString(GUID type, char * typeAsString){
char tmpStr[8];
if( type == MEDIASUBTYPE_RGB24) sprintf(tmpStr, "RGB24");
else if(type == MEDIASUBTYPE_RGB32) sprintf(tmpStr, "RGB32");
else if(type == MEDIASUBTYPE_RGB555)sprintf(tmpStr, "RGB555");
else if(type == MEDIASUBTYPE_RGB565)sprintf(tmpStr, "RGB565");
else if(type == MEDIASUBTYPE_YUY2) sprintf(tmpStr, "YUY2");
else if(type == MEDIASUBTYPE_YVYU) sprintf(tmpStr, "YVYU");
else if(type == MEDIASUBTYPE_YUYV) sprintf(tmpStr, "YUYV");
else if(type == MEDIASUBTYPE_IYUV) sprintf(tmpStr, "IYUV");
else if(type == MEDIASUBTYPE_UYVY) sprintf(tmpStr, "UYVY");
else if(type == MEDIASUBTYPE_YV12) sprintf(tmpStr, "YV12");
else if(type == MEDIASUBTYPE_YVU9) sprintf(tmpStr, "YVU9");
else if(type == MEDIASUBTYPE_Y411) sprintf(tmpStr, "Y411");
else if(type == MEDIASUBTYPE_Y41P) sprintf(tmpStr, "Y41P");
else if(type == MEDIASUBTYPE_Y211) sprintf(tmpStr, "Y211");
else if(type == MEDIASUBTYPE_AYUV) sprintf(tmpStr, "AYUV");
else if(type == MEDIASUBTYPE_MJPG) sprintf(tmpStr, "MJPG");
else if(type == MEDIASUBTYPE_Y800) sprintf(tmpStr, "Y800");
else if(type == MEDIASUBTYPE_Y8) sprintf(tmpStr, "Y8");
else if(type == MEDIASUBTYPE_GREY) sprintf(tmpStr, "GREY");
else if(type == MEDIASUBTYPE_I420) sprintf(tmpStr, "I420");
else sprintf(tmpStr, "OTHER");
memcpy(typeAsString, tmpStr, sizeof(char)*8);
}
int videoInput::getFourccFromMediaSubtype(GUID type) const
{
return type.Data1;
}
GUID *videoInput::getMediaSubtypeFromFourcc(int fourcc){
for (int i=0;i<VI_NUM_TYPES;i++) {
if ( (unsigned long)(unsigned)fourcc == mediaSubtypes[i].Data1 ) {
return &mediaSubtypes[i];
}
}
return NULL;
}
void videoInput::getVideoPropertyAsString(int prop, char * propertyAsString){
char tmpStr[16];
if ( prop==VideoProcAmp_Brightness) sprintf(tmpStr, "Brightness");
else if ( prop==VideoProcAmp_Contrast) sprintf(tmpStr, "Contrast");
else if ( prop==VideoProcAmp_Saturation) sprintf(tmpStr, "Saturation");
else if ( prop==VideoProcAmp_Hue) sprintf(tmpStr, "Hue");
else if ( prop==VideoProcAmp_Gain) sprintf(tmpStr, "Gain");
else if ( prop==VideoProcAmp_Gamma) sprintf(tmpStr, "Gamma");
else if ( prop==VideoProcAmp_ColorEnable) sprintf(tmpStr, "ColorEnable");
else if ( prop==VideoProcAmp_Sharpness) sprintf(tmpStr, "Sharpness");
else sprintf(tmpStr, "%u",prop);
memcpy(propertyAsString, tmpStr, sizeof(char)*16);
}
int videoInput::getVideoPropertyFromCV(int cv_property){
switch (cv_property) {
case CV_CAP_PROP_BRIGHTNESS:
return VideoProcAmp_Brightness;
case CV_CAP_PROP_CONTRAST:
return VideoProcAmp_Contrast;
case CV_CAP_PROP_HUE:
return VideoProcAmp_Hue;
case CV_CAP_PROP_SATURATION:
return VideoProcAmp_Saturation;
case CV_CAP_PROP_SHARPNESS:
return VideoProcAmp_Sharpness;
case CV_CAP_PROP_GAMMA:
return VideoProcAmp_Gamma;
case CV_CAP_PROP_MONOCHROME:
return VideoProcAmp_ColorEnable;
case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
return VideoProcAmp_WhiteBalance;
case CV_CAP_PROP_BACKLIGHT:
return VideoProcAmp_BacklightCompensation;
case CV_CAP_PROP_GAIN:
return VideoProcAmp_Gain;
}
return -1;
}
int videoInput::getCameraPropertyFromCV(int cv_property){
switch (cv_property) {
case CV_CAP_PROP_PAN:
return CameraControl_Pan;
case CV_CAP_PROP_TILT:
return CameraControl_Tilt;
case CV_CAP_PROP_ROLL:
return CameraControl_Roll;
case CV_CAP_PROP_ZOOM:
return CameraControl_Zoom;
case CV_CAP_PROP_EXPOSURE:
return CameraControl_Exposure;
case CV_CAP_PROP_IRIS:
return CameraControl_Iris;
case CV_CAP_PROP_FOCUS:
return CameraControl_Focus;
}
return -1;
}
void videoInput::getCameraPropertyAsString(int prop, char * propertyAsString){
char tmpStr[16];
if ( prop==CameraControl_Pan) sprintf(tmpStr, "Pan");
else if ( prop==CameraControl_Tilt) sprintf(tmpStr, "Tilt");
else if ( prop==CameraControl_Roll) sprintf(tmpStr, "Roll");
else if ( prop==CameraControl_Zoom) sprintf(tmpStr, "Zoom");
else if ( prop==CameraControl_Exposure) sprintf(tmpStr, "Exposure");
else if ( prop==CameraControl_Iris) sprintf(tmpStr, "Iris");
else if ( prop==CameraControl_Focus) sprintf(tmpStr, "Focus");
else sprintf(tmpStr, "%u",prop);
memcpy(propertyAsString, tmpStr, sizeof(char)*16);
}
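// Walk the capture pin's VIDEO_STREAM_CONFIG_CAPS entries and report the supported
// width/height (plus its media subtype) closest to widthIn x heightIn, stopping
// early on an exact match.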
static void findClosestSizeAndSubtype(videoDevice * VD, int widthIn, int heightIn, int &widthOut, int &heightOut, GUID & mediatypeOut){
HRESULT hr;
int nearW = 9999999;
int nearH = 9999999;
int iCount = 0;
int iSize = 0;
hr = VD->streamConf->GetNumberOfCapabilities(&iCount, &iSize);
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
for (int iFormat = 0; iFormat < iCount; iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = VD->streamConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr)){
int stepX = scc.OutputGranularityX;
int stepY = scc.OutputGranularityY;
int tempW = 999999;
int tempH = 999999;
if(stepX < 1 || stepY < 1) continue;
bool exactMatch = false;
bool exactMatchX = false;
bool exactMatchY = false;
for(int x = scc.MinOutputSize.cx; x <= scc.MaxOutputSize.cx; x+= stepX){
if( widthIn == x ){
exactMatchX = true;
tempW = x;
}
else if( abs(widthIn-x) < abs(widthIn-tempW) ){
tempW = x;
}
}
for(int y = scc.MinOutputSize.cy; y <= scc.MaxOutputSize.cy; y+= stepY){
if( heightIn == y){
exactMatchY = true;
tempH = y;
}
else if( abs(heightIn-y) < abs(heightIn-tempH) ){
tempH = y;
}
}
if(exactMatchX && exactMatchY){
exactMatch = true;
widthOut = widthIn;
heightOut = heightIn;
mediatypeOut = pmtConfig->subtype;
}
else if( abs(widthIn - tempW) + abs(heightIn - tempH) < abs(widthIn - nearW) + abs(heightIn - nearH) )
{
nearW = tempW;
nearH = tempH;
widthOut = nearW;
heightOut = nearH;
mediatypeOut = pmtConfig->subtype;
}
MyDeleteMediaType(pmtConfig);
if(exactMatch)break;
}
}
}
}
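// Try to switch the capture pin to attemptWidth x attemptHeight with the given media
// subtype (and the requested frame time, if any); if SetFormat() fails, the previously
// queried format is restored.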
static bool setSizeAndSubtype(videoDevice * VD, int attemptWidth, int attemptHeight, GUID mediatype){
VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(VD->pAmMediaType->pbFormat);
AM_MEDIA_TYPE * tmpType = NULL;
HRESULT hr = VD->streamConf->GetFormat(&tmpType);
if(hr != S_OK)return false;
HEADER(pVih)->biWidth = attemptWidth;
HEADER(pVih)->biHeight = attemptHeight;
VD->pAmMediaType->formattype = FORMAT_VideoInfo;
VD->pAmMediaType->majortype = MEDIATYPE_Video;
VD->pAmMediaType->subtype = mediatype;
if (mediatype == MEDIASUBTYPE_RGB24)
{
VD->pAmMediaType->lSampleSize = attemptWidth*attemptHeight*3;
}
else
{
VD->pAmMediaType->lSampleSize = 0;
}
if( VD->requestedFrameTime != -1){
pVih->AvgTimePerFrame = VD->requestedFrameTime;
}
hr = VD->streamConf->SetFormat(VD->pAmMediaType);
if(hr == S_OK){
if( tmpType != NULL )MyDeleteMediaType(tmpType);
return true;
}else{
VD->streamConf->SetFormat(tmpType);
if( tmpType != NULL )MyDeleteMediaType(tmpType);
}
return false;
}
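// Build and run the capture graph for one device: create the capture graph builder and
// filter graph, add the capture source, route the crossbar if requested, negotiate the
// size/subtype (falling back to the closest supported mode), insert the SampleGrabber
// and NullRenderer, turn off the reference clock, and start the graph.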
int videoInput::start(int deviceID, videoDevice *VD){
HRESULT hr = NOERROR;
VD->myID = deviceID;
VD->setupStarted = true;
CAPTURE_MODE = PIN_CATEGORY_CAPTURE;
callbackSetCount = 1;
DebugPrintOut("SETUP: Setting up device %i\n",deviceID);
hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&VD->pCaptureGraph);
if (FAILED(hr))
{
DebugPrintOut("ERROR - Could not create the Filter Graph Manager\n");
return hr;
}
hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&VD->pGraph);
if (FAILED(hr))
{
DebugPrintOut("ERROR - Could not add the graph builder!\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pCaptureGraph->SetFiltergraph(VD->pGraph);
if (FAILED(hr))
{
DebugPrintOut("ERROR - Could not set filtergraph\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pGraph->QueryInterface(IID_IMediaControl, (void **)&VD->pControl);
if (FAILED(hr))
{
DebugPrintOut("ERROR - Could not create the Media Control object\n");
stopDevice(deviceID);
return hr;
}
hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
if (SUCCEEDED(hr)){
DebugPrintOut("SETUP: %s\n", VD->nDeviceName);
hr = VD->pGraph->AddFilter(VD->pVideoInputFilter, VD->wDeviceName);
}else{
DebugPrintOut("ERROR - Could not find specified video device\n");
stopDevice(deviceID);
return hr;
}
IAMStreamConfig *streamConfTest = NULL;
hr = VD->pCaptureGraph->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, VD->pVideoInputFilter, IID_IAMStreamConfig, (void **)&streamConfTest);
if(FAILED(hr)){
DebugPrintOut("SETUP: Couldn't find preview pin using SmartTee\n");
}else{
CAPTURE_MODE = PIN_CATEGORY_PREVIEW;
streamConfTest->Release();
streamConfTest = NULL;
}
if(VD->useCrossbar)
{
DebugPrintOut("SETUP: Checking crossbar\n");
routeCrossbar(&VD->pCaptureGraph, &VD->pVideoInputFilter, VD->connection, CAPTURE_MODE);
}
hr = VD->pCaptureGraph->FindInterface(&CAPTURE_MODE, &MEDIATYPE_Video, VD->pVideoInputFilter, IID_IAMStreamConfig, (void **)&VD->streamConf);
if(FAILED(hr)){
DebugPrintOut("ERROR: Couldn't config the stream!\n");
stopDevice(deviceID);
return hr;
}
hr = VD->streamConf->GetFormat(&VD->pAmMediaType);
if(FAILED(hr)){
DebugPrintOut("ERROR: Couldn't getFormat for pAmMediaType!\n");
stopDevice(deviceID);
return hr;
}
VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(VD->pAmMediaType->pbFormat);
int currentWidth = HEADER(pVih)->biWidth;
int currentHeight = HEADER(pVih)->biHeight;
bool customSize = VD->tryDiffSize;
bool foundSize = false;
if(customSize){
DebugPrintOut("SETUP: Default Format is set to %ix%i\n", currentWidth, currentHeight);
char guidStr[8];
getMediaSubtypeAsString(VD->tryVideoType, guidStr);
DebugPrintOut("SETUP: trying specified format %s @ %ix%i\n", guidStr, VD->tryWidth, VD->tryHeight);
if( setSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, VD->tryVideoType) ){
VD->setSize(VD->tryWidth, VD->tryHeight);
VD->videoType = VD->tryVideoType;
foundSize = true;
} else {
for(int i = 0; i < VI_NUM_TYPES; i++){
getMediaSubtypeAsString(mediaSubtypes[i], guidStr);
DebugPrintOut("SETUP: trying format %s @ %ix%i\n", guidStr, VD->tryWidth, VD->tryHeight);
if( setSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, mediaSubtypes[i]) ){
VD->setSize(VD->tryWidth, VD->tryHeight);
VD->videoType = mediaSubtypes[i];
foundSize = true;
break;
}
}
}
if( foundSize == false ){
DebugPrintOut("SETUP: couldn't find requested size - searching for closest matching size\n");
int closestWidth = -1;
int closestHeight = -1;
GUID newMediaSubtype;
findClosestSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, closestWidth, closestHeight, newMediaSubtype);
if( closestWidth != -1 && closestHeight != -1){
getMediaSubtypeAsString(newMediaSubtype, guidStr);
DebugPrintOut("SETUP: closest supported size is %s @ %i %i\n", guidStr, closestWidth, closestHeight);
if( setSizeAndSubtype(VD, closestWidth, closestHeight, newMediaSubtype) ){
VD->setSize(closestWidth, closestHeight);
foundSize = true;
}
}
}
}
if(customSize == false || foundSize == false){
if( VD->requestedFrameTime != -1 ){
pVih->AvgTimePerFrame = VD->requestedFrameTime;
hr = VD->streamConf->SetFormat(VD->pAmMediaType);
}
VD->setSize(currentWidth, currentHeight);
}
hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&VD->pGrabberF);
if (FAILED(hr)){
DebugPrintOut("Could not Create Sample Grabber - CoCreateInstance()\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pGraph->AddFilter(VD->pGrabberF, L"Sample Grabber");
if (FAILED(hr)){
DebugPrintOut("Could not add Sample Grabber - AddFilter()\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pGrabberF->QueryInterface(IID_ISampleGrabber, (void**)&VD->pGrabber);
if (FAILED(hr)){
DebugPrintOut("ERROR: Could not query SampleGrabber\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pGrabber->SetOneShot(FALSE);
if(bCallback){
hr = VD->pGrabber->SetBufferSamples(FALSE);
}else{
hr = VD->pGrabber->SetBufferSamples(TRUE);
}
if(bCallback){
hr = VD->pGrabber->SetCallback(VD->sgCallback, 0);
if (FAILED(hr)){
DebugPrintOut("ERROR: problem setting callback\n");
stopDevice(deviceID);
return hr;
}else{
DebugPrintOut("SETUP: Capture callback set\n");
}
}
AM_MEDIA_TYPE mt;
ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
mt.majortype = MEDIATYPE_Video;
mt.subtype = MEDIASUBTYPE_RGB24;
mt.formattype = FORMAT_VideoInfo;
hr = VD->pGrabber->SetMediaType(&mt);
if(VD->streamConf){
VD->streamConf->Release();
VD->streamConf = NULL;
}else{
DebugPrintOut("ERROR: connecting device - prehaps it is already being used?\n");
stopDevice(deviceID);
return S_FALSE;
}
hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&VD->pDestFilter));
if (FAILED(hr)){
DebugPrintOut("ERROR: Could not create filter - NullRenderer\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pGraph->AddFilter(VD->pDestFilter, L"NullRenderer");
if (FAILED(hr)){
DebugPrintOut("ERROR: Could not add filter - NullRenderer\n");
stopDevice(deviceID);
return hr;
}
hr = VD->pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, VD->pVideoInputFilter, VD->pGrabberF, VD->pDestFilter);
if (FAILED(hr)){
DebugPrintOut("ERROR: Could not connect pins - RenderStream()\n");
stopDevice(deviceID);
return hr;
}
{
IMediaFilter *pMediaFilter = 0;
hr = VD->pGraph->QueryInterface(IID_IMediaFilter, (void**)&pMediaFilter);
if (FAILED(hr)){
DebugPrintOut("ERROR: Could not get IID_IMediaFilter interface\n");
}else{
pMediaFilter->SetSyncSource(NULL);
pMediaFilter->Release();
}
}
hr = VD->pControl->Run();
if (FAILED(hr)){
DebugPrintOut("ERROR: Could not start graph\n");
stopDevice(deviceID);
return hr;
}
if(!bCallback){
long bufferSize = VD->videoSize;
while( hr != S_OK){
hr = VD->pGrabber->GetCurrentBuffer(&bufferSize, (long *)VD->pBuffer);
Sleep(10);
}
}
DebugPrintOut("SETUP: Device is setup and ready to capture.\n\n");
VD->readyToCapture = true;
VD->pVideoInputFilter->Release();
VD->pVideoInputFilter = NULL;
VD->pGrabberF->Release();
VD->pGrabberF = NULL;
VD->pDestFilter->Release();
VD->pDestFilter = NULL;
return S_OK;
}
int videoInput::getDeviceCount(){
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
int deviceCounter = 0;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
reinterpret_cast<void**>(&pDevEnum));
if (SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(
CLSID_VideoInputDeviceCategory,
&pEnum, 0);
if(hr == S_OK){
IMoniker *pMoniker = NULL;
while (pEnum->Next(1, &pMoniker, NULL) == S_OK){
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void**)(&pPropBag));
if (FAILED(hr)){
pMoniker->Release();
continue;
}
pPropBag->Release();
pPropBag = NULL;
pMoniker->Release();
pMoniker = NULL;
deviceCounter++;
}
pEnum->Release();
pEnum = NULL;
}
pDevEnum->Release();
pDevEnum = NULL;
}
return deviceCounter;
}
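// Bind the device at position deviceId to a base filter and copy its friendly name
// into the supplied wide and narrow character buffers.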
HRESULT videoInput::getDevice(IBaseFilter** gottaFilter, int deviceId, WCHAR * wDeviceName, char * nDeviceName){
BOOL done = false;
int deviceCounter = 0;
ICreateDevEnum *pSysDevEnum = NULL;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
if (FAILED(hr))
{
return hr;
}
IEnumMoniker *pEnumCat = NULL;
hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
if (hr == S_OK)
{
IMoniker *pMoniker = NULL;
ULONG cFetched;
while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done))
{
if(deviceCounter == deviceId)
{
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
if (SUCCEEDED(hr))
{
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
if (SUCCEEDED(hr))
{
int count = 0;
while( varName.bstrVal[count] != 0x00 ) {
wDeviceName[count] = varName.bstrVal[count];
nDeviceName[count] = (char)varName.bstrVal[count];
count++;
}
hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)gottaFilter);
done = true;
}
VariantClear(&varName);
pPropBag->Release();
pPropBag = NULL;
pMoniker->Release();
pMoniker = NULL;
}
}
deviceCounter++;
}
pEnumCat->Release();
pEnumCat = NULL;
}
pSysDevEnum->Release();
pSysDevEnum = NULL;
if (done) {
return hr;
} else {
return VFW_E_NOT_FOUND;
}
}
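// Show the filter's property pages (if it exposes ISpecifyPropertyPages) in a modal dialog.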
HRESULT videoInput::ShowFilterPropertyPages(IBaseFilter *pFilter){
ISpecifyPropertyPages *pProp;
HRESULT hr = pFilter->QueryInterface(IID_ISpecifyPropertyPages, (void **)&pProp);
if (SUCCEEDED(hr))
{
FILTER_INFO FilterInfo;
hr = pFilter->QueryFilterInfo(&FilterInfo);
IUnknown *pFilterUnk;
pFilter->QueryInterface(IID_IUnknown, (void **)&pFilterUnk);
CAUUID caGUID;
pProp->GetPages(&caGUID);
pProp->Release();
OleCreatePropertyFrame(
NULL,
0, 0,
FilterInfo.achName,
1,
&pFilterUnk,
caGUID.cElems,
caGUID.pElems,
0,
0, NULL
);
if(pFilterUnk)pFilterUnk->Release();
if(FilterInfo.pGraph)FilterInfo.pGraph->Release();
CoTaskMemFree(caGUID.pElems);
}
return hr;
}
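// Showing stream (pin) property pages is not implemented; this stub simply reports success.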
HRESULT videoInput::ShowStreamPropertyPages(IAMStreamConfig * ){
HRESULT hr = NOERROR;
return hr;
}
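// Persist the filter graph into a structured-storage (.grf) file that GraphEdit can open.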
HRESULT videoInput::SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath) {
const WCHAR wszStreamName[] = L"ActiveMovieGraph";
HRESULT hr;
IStorage *pStorage = NULL;
hr = StgCreateDocfile(
wszPath,
STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
0, &pStorage);
if(FAILED(hr))
{
return hr;
}
IStream *pStream;
hr = pStorage->CreateStream(
wszStreamName,
STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE,
0, 0, &pStream);
if (FAILED(hr))
{
pStorage->Release();
return hr;
}
IPersistStream *pPersist = NULL;
pGraph->QueryInterface(IID_IPersistStream, reinterpret_cast<void**>(&pPersist));
hr = pPersist->Save(pStream, TRUE);
pStream->Release();
pPersist->Release();
if (SUCCEEDED(hr))
{
hr = pStorage->Commit(STGC_DEFAULT);
}
pStorage->Release();
return hr;
}
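// For crossbar-based devices (TV tuners, analog capture cards) route the requested physical
// input pin to the video decoder output pin. Plain webcams have no crossbar and need no routing.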
HRESULT videoInput::routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter **pVidInFilter, int conType, GUID captureMode){
ICaptureGraphBuilder2 *pBuild = NULL;
pBuild = *ppBuild;
IBaseFilter *pVidFilter = NULL;
pVidFilter = * pVidInFilter;
IAMCrossbar *pXBar1 = NULL;
HRESULT hr = pBuild->FindInterface(&LOOK_UPSTREAM_ONLY, NULL, pVidFilter,
IID_IAMCrossbar, (void**)&pXBar1);
if (SUCCEEDED(hr))
{
bool foundDevice = false;
DebugPrintOut("SETUP: You are not a webcam! Setting Crossbar\n");
pXBar1->Release();
pXBar1 = NULL; // already released; prevents a second Release() further down
IAMCrossbar *Crossbar = NULL;
hr = pBuild->FindInterface(&captureMode, &MEDIATYPE_Interleaved, pVidFilter, IID_IAMCrossbar, (void **)&Crossbar);
if(hr != NOERROR){
hr = pBuild->FindInterface(&captureMode, &MEDIATYPE_Video, pVidFilter, IID_IAMCrossbar, (void **)&Crossbar);
}
LONG lInpin, lOutpin;
hr = Crossbar->get_PinCounts(&lOutpin , &lInpin);
BOOL iPin=TRUE; LONG pIndex=0 , pRIndex=0 , pType=0;
while( pIndex < lInpin)
{
hr = Crossbar->get_CrossbarPinInfo( iPin , pIndex , &pRIndex , &pType);
if( pType == conType){
DebugPrintOut("SETUP: Found Physical Interface");
switch(conType){
case PhysConn_Video_Composite:
DebugPrintOut(" - Composite\n");
break;
case PhysConn_Video_SVideo:
DebugPrintOut(" - S-Video\n");
break;
case PhysConn_Video_Tuner:
DebugPrintOut(" - Tuner\n");
break;
case PhysConn_Video_USB:
DebugPrintOut(" - USB\n");
break;
case PhysConn_Video_1394:
DebugPrintOut(" - Firewire\n");
break;
}
foundDevice = true;
break;
}
pIndex++;
}
if(foundDevice){
BOOL OPin=FALSE; LONG pOIndex=0 , pORIndex=0 , pOType=0;
while( pOIndex < lOutpin)
{
hr = Crossbar->get_CrossbarPinInfo( OPin , pOIndex , &pORIndex , &pOType);
if( pOType == PhysConn_Video_VideoDecoder)
break;
pOIndex++; // advance so the scan terminates even if no video decoder pin exists
}
Crossbar->Route(pOIndex,pIndex);
}else{
DebugPrintOut("SETUP: Didn't find specified Physical Connection type. Using Defualt.\n");
}
if(Crossbar)Crossbar->Release();
Crossbar = NULL;
}else{
DebugPrintOut("SETUP: You are a webcam or snazzy firewire cam! No Crossbar needed\n");
return hr;
}
return hr;
}
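// OpenCV wrapper around the videoInput class defined above.
// Minimal usage sketch of the underlying API (illustrative only; it mirrors the calls
// the wrapper below makes - see cap_dshow.hpp for the declarations):
//   videoInput VI;
//   int n = VI.listDevices(true);             // enumerate capture devices
//   VI.setupDevice(0);                        // open device 0 with default settings
//   std::vector<unsigned char> buf(VI.getSize(0));
//   VI.getPixels(0, buf.data(), false, true); // same flags as retrieveFrame() below
//   VI.stopDevice(0);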
namespace cv
{
videoInput VideoCapture_DShow::g_VI;
VideoCapture_DShow::VideoCapture_DShow(int index)
: m_index(-1)
, m_width(-1)
, m_height(-1)
, m_fourcc(-1)
, m_widthSet(-1)
, m_heightSet(-1)
{
CoInitialize(0);
open(index);
}
VideoCapture_DShow::~VideoCapture_DShow()
{
close();
CoUninitialize();
}
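// Map OpenCV property identifiers onto videoInput queries. Filter (video processing amplifier)
// and camera-control properties return their current value; anything else returns -1.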
double VideoCapture_DShow::getProperty(int propIdx) const
{
long min_value, max_value, stepping_delta, current_value, flags, defaultValue;
switch (propIdx)
{
case CV_CAP_PROP_FRAME_WIDTH:
return g_VI.getWidth(m_index);
case CV_CAP_PROP_FRAME_HEIGHT:
return g_VI.getHeight(m_index);
case CV_CAP_PROP_FOURCC:
return g_VI.getFourcc(m_index);
case CV_CAP_PROP_FPS:
return g_VI.getFPS(m_index);
case CV_CAP_PROP_BRIGHTNESS:
case CV_CAP_PROP_CONTRAST:
case CV_CAP_PROP_HUE:
case CV_CAP_PROP_SATURATION:
case CV_CAP_PROP_SHARPNESS:
case CV_CAP_PROP_GAMMA:
case CV_CAP_PROP_MONOCHROME:
case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
case CV_CAP_PROP_BACKLIGHT:
case CV_CAP_PROP_GAIN:
if (g_VI.getVideoSettingFilter(m_index, g_VI.getVideoPropertyFromCV(propIdx), min_value, max_value, stepping_delta, current_value, flags, defaultValue))
return (double)current_value;
case CV_CAP_PROP_PAN:
case CV_CAP_PROP_TILT:
case CV_CAP_PROP_ROLL:
case CV_CAP_PROP_ZOOM:
case CV_CAP_PROP_EXPOSURE:
case CV_CAP_PROP_IRIS:
case CV_CAP_PROP_FOCUS:
if (g_VI.getVideoSettingCamera(m_index, g_VI.getCameraPropertyFromCV(propIdx), min_value, max_value, stepping_delta, current_value, flags, defaultValue))
return (double)current_value;
}
return -1;
}
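// Width, height, FOURCC and FPS changes require stopping the device and re-opening it with the
// new format; the remaining properties map directly onto filter or camera controls.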
bool VideoCapture_DShow::setProperty(int propIdx, double propVal)
{
bool handled = false;
switch (propIdx)
{
case CV_CAP_PROP_FRAME_WIDTH:
m_width = cvRound(propVal);
handled = true;
break;
case CV_CAP_PROP_FRAME_HEIGHT:
m_height = cvRound(propVal);
handled = true;
break;
case CV_CAP_PROP_FOURCC:
m_fourcc = (int)(unsigned long)(propVal);
if (-1 == m_fourcc)
{
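// A FOURCC of -1 is conventionally a request for a format-selection dialog; nothing is done here.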
}
handled = true;
break;
case CV_CAP_PROP_FPS:
int fps = cvRound(propVal);
if (fps != g_VI.getFPS(m_index))
{
g_VI.stopDevice(m_index);
g_VI.setIdealFramerate(m_index, fps);
if (m_widthSet > 0 && m_heightSet > 0)
g_VI.setupDevice(m_index, m_widthSet, m_heightSet);
else
g_VI.setupDevice(m_index);
}
return g_VI.isDeviceSetup(m_index);
}
if (handled)
{
if (m_width > 0 && m_height > 0)
{
if (m_width != g_VI.getWidth(m_index) || m_height != g_VI.getHeight(m_index) )
{
int fps = static_cast<int>(g_VI.getFPS(m_index));
g_VI.stopDevice(m_index);
g_VI.setIdealFramerate(m_index, fps);
g_VI.setupDeviceFourcc(m_index, m_width, m_height, m_fourcc);
}
bool success = g_VI.isDeviceSetup(m_index);
if (success)
{
m_widthSet = m_width;
m_heightSet = m_height;
m_width = m_height = m_fourcc = -1;
}
return success;
}
return true;
}
if (propIdx == CV_CAP_PROP_SETTINGS )
{
g_VI.showSettingsWindow(m_index);
return true;
}
switch (propIdx)
{
case CV_CAP_PROP_BRIGHTNESS:
case CV_CAP_PROP_CONTRAST:
case CV_CAP_PROP_HUE:
case CV_CAP_PROP_SATURATION:
case CV_CAP_PROP_SHARPNESS:
case CV_CAP_PROP_GAMMA:
case CV_CAP_PROP_MONOCHROME:
case CV_CAP_PROP_WHITE_BALANCE_BLUE_U:
case CV_CAP_PROP_BACKLIGHT:
case CV_CAP_PROP_GAIN:
return g_VI.setVideoSettingFilter(m_index, g_VI.getVideoPropertyFromCV(propIdx), (long)propVal);
}
switch (propIdx)
{
case CV_CAP_PROP_PAN:
case CV_CAP_PROP_TILT:
case CV_CAP_PROP_ROLL:
case CV_CAP_PROP_ZOOM:
case CV_CAP_PROP_EXPOSURE:
case CV_CAP_PROP_IRIS:
case CV_CAP_PROP_FOCUS:
return g_VI.setVideoSettingCamera(m_index, g_VI.getCameraPropertyFromCV(propIdx), (long)propVal);
}
return false;
}
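// Frames are captured continuously by the graph, so there is nothing to grab here;
// the latest frame is copied out in retrieveFrame().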
bool VideoCapture_DShow::grabFrame()
{
return true;
}
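// Copy the most recent frame into an 8-bit, 3-channel BGR Mat, flipping it vertically
// to match OpenCV's top-down row order.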
bool VideoCapture_DShow::retrieveFrame(int, OutputArray frame)
{
frame.create(Size(g_VI.getWidth(m_index), g_VI.getHeight(m_index)), CV_8UC3);
cv::Mat mat = frame.getMat();
return g_VI.getPixels(m_index, mat.ptr(), false, true );
}
int VideoCapture_DShow::getCaptureDomain()
{
return CV_CAP_DSHOW;
}
bool VideoCapture_DShow::isOpened() const
{
return (-1 != m_index);
}
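// (Re)open the capture device with its default format; m_index stays -1 if setup fails.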
void VideoCapture_DShow::open(int index)
{
close();
int devices = g_VI.listDevices(true);
if (0 == devices)
return;
if (index < 0 || index > devices-1)
return;
g_VI.setupDevice(index);
if (!g_VI.isDeviceSetup(index))
return;
m_index = index;
}
void VideoCapture_DShow::close()
{
if (m_index >= 0)
{
g_VI.stopDevice(m_index);
m_index = -1;
}
m_widthSet = m_heightSet = m_width = m_height = -1;
}
}
#endif