This source file includes following definitions.
- init
- init
- close
- grabFrame
- retrieveFrame
- isPipelinePlaying
- startPipeline
- stopPipeline
- restartPipeline
- setFilter
- removeFilter
- newPad
- open
- getProperty
- setProperty
- cvCreateCapture_GStreamer
- init
- close
- filenameToMimetype
- open
- writeFrame
- cvCreateVideoWriter_GStreamer
- toFraction
- handleMessage
#include "precomp.hpp"
#include <unistd.h>
#include <string.h>
#include <gst/gst.h>
#include <gst/gstbuffer.h>
#include <gst/video/video.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/riff/riff-media.h>
#include <gst/pbutils/missing-plugins.h>
#define VERSION_NUM(major, minor, micro) (major * 1000000 + minor * 1000 + micro)
#define FULL_GST_VERSION VERSION_NUM(GST_VERSION_MAJOR, GST_VERSION_MINOR, GST_VERSION_MICRO)
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
#include <gst/pbutils/encoding-profile.h>
#endif
#ifdef NDEBUG
#define CV_WARN(message)
#else
#define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
#endif
#if GST_VERSION_MAJOR == 0
#define COLOR_ELEM "ffmpegcolorspace"
#elif FULL_GST_VERSION < VERSION_NUM(1,5,0)
#define COLOR_ELEM "videoconvert"
#else
#define COLOR_ELEM "autovideoconvert"
#endif
void toFraction(double decimal, double &numerator, double &denominator);
void handleMessage(GstElement * pipeline);
static cv::Mutex gst_initializer_mutex;
// Ensures gst_init() runs exactly once for the whole process.
// The function-local static performs the one-time construction; the mutex
// guards it for toolchains where static-local initialization is not
// thread-safe (pre-C++11 semantics).
class gst_initializer
{
public:
    static void init()
    {
        gst_initializer_mutex.lock();
        static gst_initializer init;   // constructed on first call only
        gst_initializer_mutex.unlock();
    }
private:
    gst_initializer()
    {
        // NULL args: do not consume command-line options
        gst_init(NULL, NULL);
    }
};
// CvCapture implementation on top of GStreamer (supports both 0.10 and 1.x
// APIs via GST_VERSION_MAJOR branches). Frames are pulled synchronously from
// an appsink element.
class CvCapture_GStreamer : public CvCapture
{
public:
    CvCapture_GStreamer() { init(); }
    virtual ~CvCapture_GStreamer() { close(); }
    // type: CV_CAP_GSTREAMER_* source kind; filename: path, URI or pipeline description
    virtual bool open( int type, const char* filename );
    virtual void close();
    virtual double getProperty(int) const;
    virtual bool setProperty(int, double);
    virtual bool grabFrame();
    virtual IplImage* retrieveFrame(int);
protected:
    void init();
    bool reopen();
    bool isPipelinePlaying();
    void startPipeline();
    void stopPipeline();
    void restartPipeline();
    void setFilter(const char* prop, int type, int v1, int v2 = 0);
    void removeFilter(const char *filter);
    // "pad-added" callback used to link uridecodebin's dynamic pads
    static void newPad(GstElement *myelement,
                       GstPad     *pad,
                       gpointer    data);
    GstElement*   pipeline;      // top-level pipeline (owned)
    GstElement*   uridecodebin;  // source element/bin
    GstElement*   color;         // colorspace converter (COLOR_ELEM)
    GstElement*   sink;          // appsink frames are pulled from
#if GST_VERSION_MAJOR > 0
    GstSample*    sample;        // last pulled sample (owns `buffer` in 1.x)
    GstMapInfo*   info;          // scratch map descriptor for retrieveFrame()
#endif
    GstBuffer*    buffer;        // last grabbed buffer
    GstCaps*      caps;          // caps filter applied to the appsink
    IplImage*     frame;         // header only; imageData points into `buffer`
    gint64        duration;      // stream length in frames, -1 if unknown
    gint          width;         // negotiated frame width, -1 if unknown
    gint          height;        // negotiated frame height, -1 if unknown
    double        fps;           // negotiated framerate, -1 if unknown
};
// Put the capture into its pristine "nothing opened" state.
void CvCapture_GStreamer::init()
{
    // cached stream properties: unknown until a stream is opened
    duration = -1;
    width    = -1;
    height   = -1;
    fps      = -1;
    // GStreamer object handles
    pipeline     = NULL;
    uridecodebin = NULL;
    color        = NULL;
    sink         = NULL;
    buffer = NULL;
    caps   = NULL;
    frame  = NULL;
#if GST_VERSION_MAJOR > 0
    sample = NULL;
    info   = new GstMapInfo;   // scratch descriptor reused by retrieveFrame()
#endif
}
void CvCapture_GStreamer::close()
{
    if (isPipelinePlaying())
        this->stopPipeline();
    if(pipeline) {
        gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
        gst_object_unref(GST_OBJECT(pipeline));
        pipeline = NULL;
    }
    duration = -1;
    width = -1;
    height = -1;
    fps = -1;
}
// Pull the next frame from the appsink, starting the pipeline on first use.
// Returns false at end-of-stream or when no buffer could be pulled; the
// actual image is materialized later by retrieveFrame().
bool CvCapture_GStreamer::grabFrame()
{
    if(!pipeline)
        return false;
    
    if(!this->isPipelinePlaying())
        this->startPipeline();
    
    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;
#if GST_VERSION_MAJOR == 0
    // 0.10: we own the pulled buffer; release the previous one first
    if(buffer)
        gst_buffer_unref(buffer);
    buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
#else
    // 1.x: the sample owns the buffer; release the previous sample first
    if(sample)
        gst_sample_unref(sample);
    sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if(!sample)
        return false;
    buffer = gst_sample_get_buffer(sample);
#endif
    if(!buffer)
        return false;
    return true;
}
// Wrap the most recently grabbed buffer in an IplImage header (no pixel
// copy). Returns 0 when nothing was grabbed or the caps are unusable.
// On the first call the negotiated caps determine geometry and channel count.
IplImage * CvCapture_GStreamer::retrieveFrame(int)
{
    if(!buffer)
        return 0;
    
    if(!frame)
    {
#if GST_VERSION_MAJOR == 0
        GstCaps* buffer_caps = gst_buffer_get_caps(buffer);
#else
        GstCaps* buffer_caps = gst_sample_get_caps(sample);
#endif
        
        assert(gst_caps_get_size(buffer_caps) == 1);
        GstStructure* structure = gst_caps_get_structure(buffer_caps, 0);
        
        if(!gst_structure_get_int(structure, "width", &width) ||
                !gst_structure_get_int(structure, "height", &height))
        {
            gst_caps_unref(buffer_caps);
            return 0;
        }
        int depth = 3;
#if GST_VERSION_MAJOR > 0
        depth = 0;
        const gchar* name = gst_structure_get_name(structure);
        const gchar* format = gst_structure_get_string(structure, "format");
        if (!name || !format)
        {
            // Fix: release the caps before bailing out (previously leaked).
            gst_caps_unref(buffer_caps);
            return 0;
        }
        // Supported formats (must match the caps set in open()):
        //   video/x-raw, format=BGR   -> 3 channels
        //   video/x-raw, format=GRAY8 -> 1 channel
        //   video/x-bayer             -> 1 channel (raw bayer data)
        if (strcasecmp(name, "video/x-raw") == 0)
        {
            if (strcasecmp(format, "BGR") == 0) {
                depth = 3;
            }
            else if(strcasecmp(format, "GRAY8") == 0){
                depth = 1;
            }
        }
        else if (strcasecmp(name, "video/x-bayer") == 0)
        {
            depth = 1;
        }
#endif
        if (depth > 0) {
            frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, depth);
        } else {
            gst_caps_unref(buffer_caps);
            return 0;
        }
        gst_caps_unref(buffer_caps);
    }
    
    // Point the header at the buffer's pixels.
#if GST_VERSION_MAJOR == 0
    frame->imageData = (char *)GST_BUFFER_DATA(buffer);
#else
    // `info` is the scratch GstMapInfo allocated in init().
    gboolean success = gst_buffer_map(buffer,info, (GstMapFlags)GST_MAP_READ);
    if (!success){
        // mapping failed; no frame data available
        return 0;
    }
    frame->imageData = (char*)info->data;
    // NOTE(review): the buffer is unmapped immediately, so imageData refers
    // to memory GStreamer may recycle; callers must copy before the next grab.
    gst_buffer_unmap(buffer,info);
#endif
    return frame;
}
// Report whether the pipeline is currently in the PLAYING state, waiting up
// to 5 seconds for an in-progress (ASYNC) state change to settle.
bool CvCapture_GStreamer::isPipelinePlaying()
{
    GstState current, pending;
    GstClockTime timeout = 5*GST_SECOND;
    if(!GST_IS_ELEMENT(pipeline)){
        return false;
    }
    // Fix: "&current" had been garbled into "¤t" by a bad HTML-entity
    // conversion, which made this line invalid C++.
    GstStateChangeReturn ret = gst_element_get_state(GST_ELEMENT(pipeline), &current, &pending, timeout);
    if (!ret){
        // GST_STATE_CHANGE_FAILURE == 0, so !ret means the query failed
        return false;
    }
    return current == GST_STATE_PLAYING;
}
// Transition the pipeline to PLAYING, blocking until an ASYNC state change
// completes. On failure the pipeline is unreffed and CV_ERROR is raised.
void CvCapture_GStreamer::startPipeline()
{
    CV_FUNCNAME("icvStartPipeline");
    __BEGIN__;
    
    GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for the state change to finish
        GstState st1;
        GstState st2;
        status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        handleMessage(pipeline);
        gst_object_unref(pipeline);
        pipeline = NULL;
        CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
        return; // unreachable: CV_ERROR jumps to the __END__ exit label
    }
    
    handleMessage(pipeline); // drain any pending bus messages
    __END__;
}
// Transition the pipeline to the NULL state. On failure the pipeline is
// unreffed, dropped, and CV_ERROR is raised.
void CvCapture_GStreamer::stopPipeline()
{
    CV_FUNCNAME("icvStopPipeline");
    __BEGIN__;
    
    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to stop pipeline\n");
        // NOTE: the lines below are unreachable -- CV_ERROR jumps to __END__
        gst_object_unref(pipeline);
        pipeline = NULL;
        return;
    }
    __END__;
}
void CvCapture_GStreamer::restartPipeline()
{
    handleMessage(pipeline);
    this->stopPipeline();
    this->startPipeline();
}
// Add or update a constraint (width/height/framerate) on the appsink caps.
// `type` is a GType: G_TYPE_INT consumes one value (v1); other types such as
// GST_TYPE_FRACTION consume two (v1/v2).
void CvCapture_GStreamer::setFilter(const char *prop, int type, int v1, int v2)
{
    // No caps yet: build a fresh caps object carrying just this constraint.
    if(!caps || !( GST_IS_CAPS (caps) ))
    {
        if(type == G_TYPE_INT)
        {
#if GST_VERSION_MAJOR == 0
            caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL);
#else
            caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL);
#endif
        }
        else
        {
#if GST_VERSION_MAJOR == 0
            caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL);
#else
            caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL);
#endif
        }
    }
    else
    {
#if GST_VERSION_MAJOR > 0
        // 1.x caps may be shared; copy-on-write before mutating
        if (! gst_caps_is_writable(caps))
            caps = gst_caps_make_writable (caps);
#endif
        if(type == G_TYPE_INT){
            gst_caps_set_simple(caps, prop, type, v1, NULL);
        }else{
            gst_caps_set_simple(caps, prop, type, v1, v2, NULL);
        }
    }
#if GST_VERSION_MAJOR > 0
    caps = gst_caps_fixate(caps);
#endif
    // Re-apply the updated caps to the sink.
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    
}
void CvCapture_GStreamer::removeFilter(const char *filter)
{
    if(!caps)
        return;
#if GST_VERSION_MAJOR > 0
    if (! gst_caps_is_writable(caps))
        caps = gst_caps_make_writable (caps);
#endif
    GstStructure *s = gst_caps_get_structure(caps, 0);
    gst_structure_remove_field(s, filter);
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
}
// "pad-added" callback: link a dynamically created uridecodebin source pad
// to the sink pad of the converter element passed via `data`.
void CvCapture_GStreamer::newPad(GstElement * ,
                                 GstPad     *pad,
                                 gpointer    data)
{
    GstElement *target = (GstElement *) data;
    GstPad *sinkpad = gst_element_get_static_pad (target, "sink");
    if (sinkpad == NULL)
        return; // converter has no static sink pad; nothing to link
    gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
}
// Open a capture from a v4l/v4l2 device, a plain file path, a gst-launch
// pipeline description, or a URI. Unless the caller supplied a complete
// pipeline, this builds: <source> ! COLOR_ELEM ! appsink.
// Returns true on success; on failure the pipeline is released.
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");
    __BEGIN__;
    gst_initializer::init();
    bool file = false;
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    GstElementFactory * testfac;
    GstStateChangeReturn status;
    // For v4l/v4l2 capture types, substitute a canned pipeline description
    // (only if the corresponding source element is available).
    if (type == CV_CAP_GSTREAMER_V4L){
        testfac = gst_element_factory_find("v4lsrc");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4lsrc ! " COLOR_ELEM " ! appsink";
    }
    if (type == CV_CAP_GSTREAMER_V4L2){
        testfac = gst_element_factory_find("v4l2src");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4l2src ! " COLOR_ELEM " ! appsink";
    }
    // Classify the filename: not a valid URI -> either an existing file path
    // (converted to a file:// URI) or a manual pipeline description.
    if(!gst_uri_is_valid(filename))
    {
        // NOTE(review): realpath()'s malloc'd result is overwritten by the
        // g_filename_to_uri() result below and never free'd; `uri` itself is
        // also never released -- small one-shot leaks per open().
        uri = realpath(filename, NULL);
        stream = false;
        if(uri)
        {
            uri = g_filename_to_uri(uri, NULL, NULL);
            if(uri)
            {
                file = true;
            }
            else
            {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            // Not an existing path: treat it as a gst-launch description.
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin)
            {
                fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }
    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // v4l2:// URIs are instantiated as a plain source element rather than
        // through uridecodebin.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
#if GST_VERSION_MAJOR == 0
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
        }
        g_free(protocol);
        if(!uridecodebin) {
            // could not create the source element
            close();
            return false;
        }
    }
    if(manualpipeline)
    {
        // Locate the appsink (or opencvsink) inside the user-supplied pipeline.
        GstIterator *it = NULL;
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));
        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;
        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                  sink = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);
              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
        if (!sink){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#endif
        pipeline = uridecodebin;
    }
    else
    {
        // Automatic pipeline: source ! COLOR_ELEM ! appsink. uridecodebin
        // exposes its pads dynamically, so it is linked via "pad-added".
        pipeline = gst_pipeline_new(NULL);
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);
        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                pipeline = NULL;
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }
        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            pipeline = NULL;
            return false;
        }
    }
    // Keep only the newest frame; for live streams drop stale ones.
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    // No new-buffer signals: frames are pulled synchronously in grabFrame().
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);
#if GST_VERSION_MAJOR == 0
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "bpp",        G_TYPE_INT, 24,
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
#else
    // Accept BGR or 8-bit gray, plus raw bayer (handled in retrieveFrame()).
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
    // For file sources, pre-roll in PAUSED so duration/size/fps can be queried.
    if (file)
    {
        status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            // wait for the state change to finish
            GstState st1;
            GstState st2;
            status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            handleMessage(pipeline);
            gst_object_unref(pipeline);
            pipeline = NULL;
            CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
            return false;
        }
        GstFormat format;
        format = GST_FORMAT_DEFAULT;
        // duration in frames (query signature differs between 0.10 and 1.x)
#if GST_VERSION_MAJOR == 0
        if(!gst_element_query_duration(sink, &format, &duration))
#else
        if(!gst_element_query_duration(sink, format, &duration))
#endif
        {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query duration of stream");
            duration = -1;
        }
        // Read width/height/fps from the negotiated caps on the converter pad.
        GstPad* pad = gst_element_get_static_pad(color, "src");
#if GST_VERSION_MAJOR == 0
        GstCaps* buffer_caps = gst_pad_get_caps(pad);
#else
        GstCaps* buffer_caps = gst_pad_get_current_caps(pad);
#endif
        const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0);
        if (!gst_structure_get_int (structure, "width", &width))
        {
            CV_WARN("Cannot query video width\n");
        }
        if (!gst_structure_get_int (structure, "height", &height))
        {
            CV_WARN("Cannot query video heigth\n");
        }
        gint num = 0, denom=1;
        if(!gst_structure_get_fraction(structure, "framerate", &num, &denom))
        {
            CV_WARN("Cannot query video fps\n");
        }
        fps = (double)num/(double)denom;
         
    }
    else
    {
        // Live sources: stream properties are discovered lazily.
        duration = -1;
        width = -1;
        height = -1;
        fps = -1;
    }
    __END__;
    return true;
}
// Query a capture property. Returns false (0.0) for unsupported properties
// or failed queries, following the CvCapture convention used here.
double CvCapture_GStreamer::getProperty( int propId ) const
{
    GstFormat format;
    gint64 value;
    gboolean status;
    // gst_element_query_position takes GstFormat* in 0.10 but GstFormat by
    // value in 1.x; the FORMAT macro hides that difference.
#if GST_VERSION_MAJOR == 0
#define FORMAT &format
#else
#define FORMAT format
#endif
    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }
    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value * 1e-6; // nanoseconds -> milliseconds
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        return value;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        status = gst_element_query_position(sink, FORMAT, &value);
        if(!status) {
            CV_WARN("GStreamer: unable to query position of stream");
            return false;
        }
        // normalize percent position to [0,1]
        return ((double) value) / GST_FORMAT_PERCENT_MAX;
    case CV_CAP_PROP_FRAME_WIDTH:
        return width;
    case CV_CAP_PROP_FRAME_HEIGHT:
        return height;
    case CV_CAP_PROP_FPS:
        return fps;
    case CV_CAP_PROP_FOURCC:
        break;
    case CV_CAP_PROP_FRAME_COUNT:
        return duration;
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break; // not supported by this backend
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink) {
            CV_WARN("GStreamer: there is no sink yet");
            return false;
        }
        return gst_app_sink_get_max_buffers(GST_APP_SINK(sink));
    default:
        CV_WARN("GStreamer: unhandled property");
        break;
    }
#undef FORMAT
    return false;
}
// Set a capture property. Seeks and caps filters require the pipeline to be
// stopped; it is restarted afterwards if it was playing.
// Always returns false, matching this backend's existing convention.
bool CvCapture_GStreamer::setProperty( int propId, double value )
{
    GstFormat format;
    GstSeekFlags flags;
    if(!pipeline) {
        CV_WARN("GStreamer: no pipeline");
        return false;
    }
    bool wasPlaying = this->isPipelinePlaying();
    if (wasPlaying)
        this->stopPipeline();
    switch(propId) {
    case CV_CAP_PROP_POS_MSEC:
        format = GST_FORMAT_TIME;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) (value * GST_MSECOND))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_FRAMES:
        format = GST_FORMAT_DEFAULT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) value)) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_POS_AVI_RATIO:
        format = GST_FORMAT_PERCENT;
        flags = (GstSeekFlags) (GST_SEEK_FLAG_FLUSH|GST_SEEK_FLAG_ACCURATE);
        if(!gst_element_seek_simple(GST_ELEMENT(pipeline), format,
                                    flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) {
            CV_WARN("GStreamer: unable to seek");
        }
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        if(value > 0)
            setFilter("width", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("width");
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        if(value > 0)
            setFilter("height", G_TYPE_INT, (int) value, 0);
        else
            removeFilter("height");
        break;
    case CV_CAP_PROP_FPS:
        if(value > 0) {
            double num=0, denom = 1;
            toFraction(value, num,  denom);
            // Fix: pass the computed numerator, not the raw fps value, as the
            // numerator of the framerate fraction.
            setFilter("framerate", GST_TYPE_FRACTION, (int) num, (int) denom);
        } else
            removeFilter("framerate");
        break;
    case CV_CAP_PROP_FOURCC:
    case CV_CAP_PROP_FRAME_COUNT:
    case CV_CAP_PROP_FORMAT:
    case CV_CAP_PROP_MODE:
    case CV_CAP_PROP_BRIGHTNESS:
    case CV_CAP_PROP_CONTRAST:
    case CV_CAP_PROP_SATURATION:
    case CV_CAP_PROP_HUE:
    case CV_CAP_PROP_GAIN:
    case CV_CAP_PROP_CONVERT_RGB:
        break; // not supported by this backend
    case CV_CAP_GSTREAMER_QUEUE_LENGTH:
        if(!sink)
            break;
        gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value);
        break;
    default:
        CV_WARN("GStreamer: unhandled property");
    }
    if (wasPlaying)
        this->startPipeline();
    return false;
}
// Factory: create a GStreamer-backed capture, or return 0 if it cannot open
// the given source.
CvCapture* cvCreateCapture_GStreamer(int type, const char* filename )
{
    CvCapture_GStreamer* capture = new CvCapture_GStreamer;
    if( !capture->open( type, filename ))
    {
        delete capture;
        return 0;
    }
    return capture;
}
// CvVideoWriter implementation on top of GStreamer: frames are pushed into
// an appsrc feeding either a user-supplied pipeline or an automatic
// appsrc ! encodebin ! filesink chain.
class CvVideoWriter_GStreamer : public CvVideoWriter
{
public:
    CvVideoWriter_GStreamer() { init(); }
    virtual ~CvVideoWriter_GStreamer() { close(); }
    // filename: output path or gst-launch pipeline description
    virtual bool open( const char* filename, int fourcc,
                       double fps, CvSize frameSize, bool isColor );
    virtual void close();
    virtual bool writeFrame( const IplImage* image );
protected:
    void init();
    // maps a filename extension to the container mimetype used by encodebin
    const char* filenameToMimetype(const char* filename);
    GstElement* pipeline;    // top-level pipeline (owned)
    GstElement* source;      // appsrc frames are pushed into
    GstElement* encodebin;   // encoder bin (or the whole manual pipeline)
    GstElement* file;        // filesink (automatic pipeline only)
    GstBuffer* buffer;       // scratch buffer for writeFrame()
    int input_pix_fmt;       // GstVideoFormat of the incoming frames
    int num_frames;          // frames written so far (used for timestamps)
    double framerate;        // output framerate passed to open()
};
void CvVideoWriter_GStreamer::init()
{
    pipeline = NULL;
    source = NULL;
    encodebin = NULL;
    file = NULL;
    buffer = NULL;
    num_frames = 0;
    framerate = 0;
}
void CvVideoWriter_GStreamer::close()
{
    GstStateChangeReturn status;
    if (pipeline)
    {
        handleMessage(pipeline);
        if (gst_app_src_end_of_stream(GST_APP_SRC(source)) != GST_FLOW_OK)
        {
            CV_WARN("Cannot send EOS to GStreamer pipeline\n");
            return;
        }
        
        GstBus* bus = gst_element_get_bus(pipeline);
        GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
        {
            CV_WARN("Error during VideoWriter finalization\n");
            return;
        }
        if(msg != NULL)
        {
            gst_message_unref(msg);
            g_object_unref(G_OBJECT(bus));
        }
        status = gst_element_set_state (pipeline, GST_STATE_NULL);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            
            GstState st1;
            GstState st2;
            status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            handleMessage (pipeline);
            gst_object_unref (GST_OBJECT (pipeline));
            pipeline = NULL;
            CV_WARN("Unable to stop gstreamer pipeline\n");
            return;
        }
        gst_object_unref (GST_OBJECT (pipeline));
        pipeline = NULL;
    }
}
// Map a filename extension to the container mimetype handed to encodebin.
// Returns NULL when the filename has no usable extension; unknown extensions
// fall back to the AVI container.
const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
{
    const char *ext = strrchr(filename, '.');
    if (!ext || ext == filename)
        return NULL;
    ++ext; // skip the dot
    // extension -> mimetype table; compare lengths match the original
    // strncasecmp calls exactly
    static const struct { const char* ext; unsigned len; const char* mime; } table[] = {
        { "avi",  3, "video/x-msvideo" },
        { "mkv",  3, "video/x-matroska" },
        { "mk3d", 4, "video/x-matroska" },
        { "webm", 4, "video/x-matroska" },
        { "wmv",  3, "video/x-ms-asf" },
        { "mov",  3, "video/x-quicktime" },
        { "ogg",  3, "application/ogg" },
        { "ogv",  3, "application/ogg" },
        { "rm",   3, "vnd.rn-realmedia" },
        { "swf",  3, "application/x-shockwave-flash" },
        { "mp4",  3, "video/x-quicktime, variant=(string)iso" },
    };
    for (unsigned i = 0; i < sizeof(table)/sizeof(table[0]); ++i)
    {
        if (strncasecmp(ext, table[i].ext, table[i].len) == 0)
            return table[i].mime;
    }
    // default to the AVI container for unrecognized extensions
    return (const char*)"video/x-msvideo";
}
// Open the writer. The filename is first tried as a gst-launch pipeline
// description (it must then contain an appsrc/opencvsrc element); otherwise
// an automatic appsrc ! encodebin ! filesink pipeline is built, with the
// container chosen from the file extension and the codec from the fourcc.
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");
    // basic argument sanity
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);
    
    gst_initializer::init();
    
    bool manualpipeline = true;
    int  bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;
    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif
    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;
#if GST_VERSION_MAJOR == 0
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;
#endif
    
    
    __BEGIN__;
    // Try the filename as a pipeline description first.
    // NOTE(review): `err` is never g_error_free'd when parsing fails.
    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);
    if(manualpipeline)
    {
        // Locate the appsrc inside the user-supplied pipeline.
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        // NOTE(review): the message says "appsink" but this searches sources
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;
        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                  source = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);
              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);
        if (!source){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        // Automatic pipeline: appsrc ! encodebin ! filesink.
        pipeline = gst_pipeline_new (NULL);
        // Normalize fourcc aliases before the gst_riff lookup.
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');
        // Codec caps derived from the fourcc.
        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
        }
        // Container mimetype derived from the filename extension.
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
        }
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);
        // Encoding profile: one video stream inside the chosen container.
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif
        
        encodebin = gst_element_factory_make("encodebin", NULL);
#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }
    // Build the caps describing the frames writeFrame() will push.
    if (is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;
#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;
#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }
    // Configure the appsrc: fixed caps, stream mode, blocking push, timed.
    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);
    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);
    if(!manualpipeline)
    {
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }
#if GST_VERSION_MAJOR == 0
    // 0.10-only workaround: find encodebin's internal streamsplitter and
    // streamcombiner and rewire their peers to bypass the pair.
    // NOTE(review): presumably this avoids a problem inside encodebin on
    // 0.10 -- confirm against the upstream history before touching it.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
      switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
          name = gst_element_get_name(element);
          if (strstr(name, "streamsplitter"))
            splitter = element;
          else if (strstr(name, "streamcombiner"))
            combiner = element;
          break;
        case GST_ITERATOR_RESYNC:
          gst_iterator_resync (it);
          break;
        case GST_ITERATOR_ERROR:
          done = true;
          break;
        case GST_ITERATOR_DONE:
          done = true;
          break;
      }
    }
    gst_iterator_free (it);
    if (splitter && combiner)
    {
        // Unlink the splitter/combiner pair and connect their former peers
        // to each other directly.
        gst_element_unlink(splitter, combiner);
        GstPad* src  = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");
        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);
        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);
        gst_pad_link(sinkPeer, srcPeer);
        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");
        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);
        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);
        gst_pad_link(sinkPeer, srcPeer);
    }
#endif
    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret  == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }
    framerate = fps;
    num_frames = 0;
    handleMessage(pipeline);
    __END__;
    return true;
}
bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
{
    // Push one frame into the appsrc element of the encoding pipeline.
    // The image must match the pixel format fixed in open(): 8-bit BGR
    // (3 channels) or, with GStreamer >= 0.10.29, 8-bit single-channel gray.
    // Returns true on success, false if the buffer could not be pushed.
    CV_FUNCNAME("CvVideoWriter_GStreamer::writeFrame");

    GstClockTime duration, timestamp;
    GstFlowReturn ret;
    int size;

    __BEGIN__;

    handleMessage(pipeline);

    // Validate the input image against the format negotiated at open() time.
    if (input_pix_fmt == GST_VIDEO_FORMAT_BGR) {
        if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 3.");
        }
    }
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
    else if (input_pix_fmt == GST_VIDEO_FORMAT_GRAY8) {
        if (image->nChannels != 1 || image->depth != IPL_DEPTH_8U) {
            CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs images with depth = IPL_DEPTH_8U and nChannels = 1.");
        }
    }
#endif
    else {
        // CV_ERROR jumps to the exit label (or throws), so no explicit
        // return is needed after it.
        CV_ERROR(CV_StsUnsupportedFormat, "cvWriteFrame() needs BGR or grayscale images\n");
    }

    size = image->imageSize;
    // Synthesize timing from the fps given to open(): each frame lasts
    // 1/framerate seconds and is stamped at frame_index * duration.
    duration = ((double)1/framerate) * GST_SECOND;
    timestamp = num_frames * duration;

    // gst_app_src_push_buffer() takes ownership, so hand it a fresh copy.
#if GST_VERSION_MAJOR == 0
    buffer = gst_buffer_try_new_and_alloc (size);
    if (!buffer)
    {
        CV_ERROR(CV_StsBadSize, "Cannot create GStreamer buffer");
    }

    memcpy(GST_BUFFER_DATA (buffer), (guint8*)image->imageData, size);
    GST_BUFFER_DURATION(buffer) = duration;
    GST_BUFFER_TIMESTAMP(buffer) = timestamp;
#else
    buffer = gst_buffer_new_allocate (NULL, size, NULL);
    GstMapInfo info;
    // Map for writing: we memcpy INTO the buffer (the old code mapped with
    // GST_MAP_READ, which requests read-only access).
    gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_WRITE);
    memcpy(info.data, (guint8*)image->imageData, size);
    gst_buffer_unmap(buffer, &info);
    GST_BUFFER_DURATION(buffer) = duration;
    GST_BUFFER_PTS(buffer) = timestamp;
    GST_BUFFER_DTS(buffer) = timestamp;
#endif

    GST_BUFFER_OFFSET(buffer) =  num_frames;

    // Ownership of 'buffer' passes to appsrc here, even on failure.
    ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
    if (ret != GST_FLOW_OK) {
        CV_WARN("Error pushing buffer to GStreamer pipeline");
        return false;
    }

    ++num_frames;

    __END__;

    return true;
}
CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps,
                                             CvSize frameSize, int isColor )
{
    // Factory: build a GStreamer-backed writer; return NULL if the
    // pipeline cannot be opened for the requested file/codec/format.
    CvVideoWriter_GStreamer* writer = new CvVideoWriter_GStreamer;
    if (!writer->open(filename, fourcc, fps, frameSize, isColor))
    {
        delete writer;
        return 0;
    }
    return writer;
}
void toFraction(double decimal, double &numerator, double &denominator)
{
    // Approximate a positive decimal (typically an fps value) by a fraction
    // numerator/denominator with denominator <= 100, suitable for
    // GST_TYPE_FRACTION fields (e.g. 29.97 -> 2997/100).
    //
    // Fixes two defects of the previous version:
    //  * the integer part ('whole') was split off and then dropped, so the
    //    returned fraction only represented the fractional part;
    //  * a residue just below 1.0 (pure floating-point rounding, as for
    //    100 * 0.97) failed the "< 0.001" test, exhausting the loop.
    double dummy;
    double whole;
    decimal = modf (decimal, &whole);

    // Find the smallest denominator that makes the fractional part
    // (nearly) integral; accept residues close to 0 or close to 1.
    for (denominator = 1; denominator<=100; denominator++){
        double frac = modf(denominator * decimal, &dummy);
        if (frac < 0.001f || frac > 0.999f)
            break;
    }

    // Round the fractional numerator and re-add the integer part.
    numerator = floor(denominator * decimal + 0.5) + whole * denominator;
}
void handleMessage(GstElement * pipeline)
{
    // Drain every pending message from the pipeline's bus: report errors
    // (and drive the pipeline to NULL on error), raise a CV error when a
    // required plugin is missing, and discard the rest.
    CV_FUNCNAME("handlemessage");

    GError *err = NULL;
    gchar *debug = NULL;
    GstBus* bus = NULL;
    GstStreamStatusType tp;
    GstElement * elem = NULL;
    GstMessage* msg  = NULL;

    __BEGIN__;
    bus = gst_element_get_bus(pipeline);

    while(gst_bus_have_pending(bus)) {
        msg = gst_bus_pop(bus);

        if(gst_is_missing_plugin_message(msg))
        {
            // Release the message and the bus before CV_ERROR jumps to the
            // exit label (or throws) -- they were leaked on this path before.
            gst_message_unref(msg);
            gst_object_unref(GST_OBJECT(bus));
            CV_ERROR(CV_StsError, "GStreamer: your gstreamer installation is missing a required plugin\n");
        }
        else
        {
            switch (GST_MESSAGE_TYPE (msg)) {
            case GST_MESSAGE_STATE_CHANGED:
                GstState oldstate, newstate, pendstate;
                gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate);
                break;
            case GST_MESSAGE_ERROR:
            {
                gst_message_parse_error(msg, &err, &debug);
                // gst_element_get_name() returns a newly allocated string
                // (transfer full); free it instead of leaking it.
                gchar* name = gst_element_get_name(GST_MESSAGE_SRC (msg));
                fprintf(stderr, "GStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
                                name, err->message);
                g_free(name);
                g_error_free(err);
                g_free(debug);
                gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
                break;
            }
            case GST_MESSAGE_EOS:
                break;
            case GST_MESSAGE_STREAM_STATUS:
                // 'elem' stays owned by the message; no unref needed.
                gst_message_parse_stream_status(msg,&tp,&elem);
                break;
            default:
                break;
            }
        }
        gst_message_unref(msg);
    }

    gst_object_unref(GST_OBJECT(bus));

    __END__
}