Tags: opencv, gstreamer
by Sanchayan Maity
Not much to write or say in this post. I was trying to extract a frame from a GStreamer pipeline and then display it with OpenCV.
There are two approaches in the code below.
- Register a callback with appsink that fires whenever a new buffer becomes available, and use a locking mechanism to synchronise extracting the frame with displaying it in the main thread.
- Pull the buffer yourself in a while loop in the main thread.
The second approach is the one active in the code below; the first is commented out. To enable the callback mechanism, uncomment the mutex locking and signal connect code and comment out the pull-buffer related code in the while loop.
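For orientation, here is a minimal sketch of the callback variant, assuming the GStreamer 0.10 appsink API and mirroring the callback shape used in the full listing below; the on_new_buffer name and the standalone globals are illustrative only:

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t frameReady = PTHREAD_COND_INITIALIZER;
static GstBuffer *latest = NULL;

/* Runs on a GStreamer streaming thread each time appsink queues a buffer. */
static gboolean on_new_buffer(GstAppSink *sink, gpointer user_data)
{
    pthread_mutex_lock(&lock);
    if (latest)
        gst_buffer_unref(latest);              /* drop an unconsumed frame */
    latest = gst_app_sink_pull_buffer(sink);   /* take ownership of the new one */
    pthread_cond_signal(&frameReady);          /* wake the display thread */
    pthread_mutex_unlock(&lock);
    return TRUE;
}

/* After creating the appsink element: */
gst_app_sink_set_emit_signals((GstAppSink*)appsink, TRUE);
g_signal_connect(appsink, "new-buffer", G_CALLBACK(on_new_buffer), NULL);

The display thread then waits on frameReady under lock, exactly as the commented-out pthread_cond_wait in main() below does.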
Learn more about GStreamer from the GStreamer Application Development Manual; refer especially to section 19.
For some reason I am experiencing a memory leak with the code below (more so with the first approach) and have not gotten around to fixing it. Also, the GStreamer pipeline elements will be different for your platform. Another problem: my GStreamer source element hands me x-raw-yuv data, and I am only able to display a black-and-white image with OpenCV. Nonetheless, I thought this might be useful, and maybe someone can point out the error to me; I am not a GStreamer expert by any means.
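Two hedged sketches towards those problems, untested here. First, the leak: each pass calls cvCreateImage, which allocates a fresh pixel buffer; assigning imageData to GST_BUFFER_DATA orphans that allocation, and the IplImage itself is never released. One way around it, assuming the same 720x576 I420 format as below and using the standard OpenCV C API header functions, is to wrap the GStreamer data in an image header instead:

GstBuffer *buf = gst_app_sink_pull_buffer((GstAppSink*)gstreamerData.appsink);
if (buf)
{
    /* Header only: no pixel allocation to orphan. The step is 720 bytes for
       an 8-bit, single-channel row of width 720. */
    IplImage *hdr = cvCreateImageHeader(cvSize(720, 576), IPL_DEPTH_8U, 1);
    cvSetData(hdr, GST_BUFFER_DATA(buf), 720);
    cvShowImage("Toradex Face Detection Demo with Gstreamer", hdr);
    cvWaitKey(1);
    cvReleaseImageHeader(&hdr);  /* frees the header only, not the buffer data */
    gst_buffer_unref(buf);       /* hand the buffer back to GStreamer */
}

Second, the black-and-white image: an 8-bit single-channel IplImage over the buffer maps only the Y (luma) plane at the front of an I420 frame, so OpenCV can only show grayscale. If your OpenCV build has the I420 conversion codes (present in 2.4.x as far as I know), something along these lines should recover colour, reusing buf from the sketch above:

/* An I420 frame is height * 3/2 rows of width-wide lines: the Y plane first,
   then the quarter-size U and V planes. */
cv::Mat yuv(576 * 3 / 2, 720, CV_8UC1, (void*)GST_BUFFER_DATA(buf));
cv::Mat bgr;
cv::cvtColor(yuv, bgr, CV_YUV2BGR_I420);
cv::imshow("Toradex Face Detection Demo with Gstreamer", bgr);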
#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include "gstreamer-0.10/gst/gst.h"
#include "gstreamer-0.10/gst/gstelement.h"
#include "gstreamer-0.10/gst/app/gstappsink.h"
#include "iostream"
#include "stdio.h"
#include "unistd.h"
#include "pthread.h"
#include "X11/Xlib.h"
#include "X11/Xutil.h"
using namespace std;
using namespace cv;
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData
{
    GstElement *appsink;
    GstElement *colorSpace;
    GstElement *pipeline;
    GstElement *vsource_capsfilter, *mixercsp_capsfilter, *cspappsink_capsfilter;
    GstElement *mixer_capsfilter;
    GstElement *bin_capture;
    GstElement *video_source, *deinterlace;
    GstElement *nv_video_mixer;
    GstPad *pad;
    GstCaps *srcdeinterlace_caps, *mixercsp_caps, *cspappsink_caps;
    GstBus *bus;
    GstMessage *msg;
} gstData;

GstBuffer *buffer;

pthread_mutex_t threadMutex = PTHREAD_MUTEX_INITIALIZER;
pthread_cond_t waitForGstBuffer = PTHREAD_COND_INITIALIZER;
/* Global variables */
CascadeClassifier face_cascade;
IplImage *frame = NULL;
string window_name = "Toradex Face Detection Demo";
String face_cascade_name = "/home/root/haarcascade_frontalface_alt2.xml";
const int BORDER = 8; // Border between GUI elements to the edge of the image.
template <typename T> string toString(T t)
{
    ostringstream out;
    out << t;
    return out.str();
}
// Draw text into an image. Defaults to top-left-justified text, but you can give negative x coords for right-justified text,
// and/or negative y coords for bottom-justified text
// Returns the bounding rect around the drawn text
Rect drawString(Mat img, string text, Point coord, Scalar color, float fontScale = 0.6f, int thickness = 1, int fontFace = FONT_HERSHEY_COMPLEX)
{
    // Get the text size & baseline.
    int baseline = 0;
    Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
    baseline += thickness;

    // Adjust the coords for left/right-justified or top/bottom-justified.
    if (coord.y >= 0) {
        // Coordinates are for the top-left corner of the text from the top-left of the image, so move down by one row.
        coord.y += textSize.height;
    }
    else {
        // Coordinates are for the bottom-left corner of the text from the bottom-left of the image, so come up from the bottom.
        coord.y += img.rows - baseline + 1;
    }
    // Become right-justified if desired.
    if (coord.x < 0) {
        coord.x += img.cols - textSize.width + 1;
    }
    // Get the bounding box around the text.
    Rect boundingRect = Rect(coord.x, coord.y - textSize.height, textSize.width, baseline + textSize.height);
    // Draw anti-aliased text.
    putText(img, text, coord, fontFace, fontScale, color, thickness, CV_AA);
    // Let the user know how big their text is, in case they want to arrange things.
    return boundingRect;
}
void create_pipeline(gstData *data)
{
    data->pipeline = gst_pipeline_new("pipeline");
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
}
gboolean CaptureGstBuffer(GstAppSink *sink, gstData *data)
{
    //g_signal_emit_by_name(sink, "pull-buffer", &buffer);

    pthread_mutex_lock(&threadMutex);
    buffer = gst_app_sink_pull_buffer(sink);
    if (buffer)
    {
        // NOTE: cvCreateImage allocates a pixel buffer that is orphaned when
        // imageData is re-pointed below, and frame is never released; this is
        // the likely source of the leak mentioned above.
        frame = cvCreateImage(cvSize(720, 576), IPL_DEPTH_16U, 3);
        if (frame == NULL)
        {
            g_printerr("IplImageFrame is null.\n");
        }
        else
        {
            //buffer = gst_app_sink_pull_buffer(sink);
            frame->imageData = (char*)GST_BUFFER_DATA(buffer);
            if (frame->imageData == NULL)
            {
                g_printerr("IplImage data is null.\n");
            }
        }
        pthread_cond_signal(&waitForGstBuffer);
    }
    pthread_mutex_unlock(&threadMutex);
    return TRUE;
}
gboolean init_video_capture(gstData *data)
{
    data->video_source = gst_element_factory_make("v4l2src", "video_source_live");
    data->vsource_capsfilter = gst_element_factory_make("capsfilter", "vsource_cptr_capsfilter");
    data->deinterlace = gst_element_factory_make("deinterlace", "deinterlace_live");
    data->nv_video_mixer = gst_element_factory_make("nv_omx_videomixer", "nv_video_mixer_capture");
    data->mixercsp_capsfilter = gst_element_factory_make("capsfilter", "mixercsp_capsfilter");
    data->colorSpace = gst_element_factory_make("ffmpegcolorspace", "csp");
    data->cspappsink_capsfilter = gst_element_factory_make("capsfilter", "cspappsink_capsfilter");
    data->appsink = gst_element_factory_make("appsink", "asink");

    if (!data->video_source || !data->vsource_capsfilter || !data->deinterlace || !data->nv_video_mixer || !data->mixercsp_capsfilter || !data->appsink
        || !data->colorSpace || !data->cspappsink_capsfilter)
    {
        g_printerr("Not all elements for video were created.\n");
        return FALSE;
    }

    g_signal_connect(data->pipeline, "deep-notify", G_CALLBACK(gst_object_default_deep_notify), NULL);

    gst_app_sink_set_emit_signals((GstAppSink*)data->appsink, true);
    gst_app_sink_set_drop((GstAppSink*)data->appsink, true);
    gst_app_sink_set_max_buffers((GstAppSink*)data->appsink, 1);

    data->srcdeinterlace_caps = gst_caps_from_string("video/x-raw-yuv, width=(int)720, height=(int)576, format=(fourcc)I420, framerate=(fraction)1/1");
    if (!data->srcdeinterlace_caps)
        g_printerr("1. Could not create media format string.\n");
    g_object_set(G_OBJECT(data->vsource_capsfilter), "caps", data->srcdeinterlace_caps, NULL);
    gst_caps_unref(data->srcdeinterlace_caps);

    data->mixercsp_caps = gst_caps_from_string("video/x-raw-yuv, width=(int)720, height=(int)576, format=(fourcc)I420, framerate=(fraction)1/1, pixel-aspect-ratio=(fraction)1/1");
    if (!data->mixercsp_caps)
        g_printerr("2. Could not create media format string.\n");
    g_object_set(G_OBJECT(data->mixercsp_capsfilter), "caps", data->mixercsp_caps, NULL);
    gst_caps_unref(data->mixercsp_caps);

    data->cspappsink_caps = gst_caps_from_string("video/x-raw-yuv, width=(int)720, height=(int)576, format=(fourcc)I420, framerate=(fraction)1/1");
    if (!data->cspappsink_caps)
        g_printerr("3. Could not create media format string.\n");
    g_object_set(G_OBJECT(data->cspappsink_capsfilter), "caps", data->cspappsink_caps, NULL);
    gst_caps_unref(data->cspappsink_caps);

    data->bin_capture = gst_bin_new("bin_capture");

    /*if (g_signal_connect(data->appsink, "new-buffer", G_CALLBACK(CaptureGstBuffer), NULL) <= 0)
    {
        g_printerr("Could not connect signal handler.\n");
        exit(1);
    }*/

    gst_bin_add_many(GST_BIN(data->bin_capture), data->video_source, data->vsource_capsfilter, data->deinterlace, data->nv_video_mixer,
                     data->mixercsp_capsfilter, data->colorSpace, data->cspappsink_capsfilter, data->appsink, NULL);

    if (gst_element_link_many(data->video_source, data->vsource_capsfilter, data->deinterlace, NULL) != TRUE)
    {
        g_printerr("video_src to deinterlace not linked.\n");
        return FALSE;
    }

    if (gst_element_link_many(data->deinterlace, data->nv_video_mixer, NULL) != TRUE)
    {
        g_printerr("deinterlace to video_mixer not linked.\n");
        return FALSE;
    }

    if (gst_element_link_many(data->nv_video_mixer, data->mixercsp_capsfilter, data->colorSpace, NULL) != TRUE)
    {
        g_printerr("video_mixer to colorspace not linked.\n");
        return FALSE;
    }

    if (gst_element_link_many(data->colorSpace, data->appsink, NULL) != TRUE)
    {
        g_printerr("colorspace to appsink not linked.\n");
        return FALSE;
    }

    cout << "Returns from init_video_capture." << endl;
    return TRUE;
}
void delete_pipeline(gstData *data)
{
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    g_print("Pipeline set to NULL\n");
    gst_object_unref(data->bus);
    gst_object_unref(data->pipeline);
    g_print("Pipeline deleted\n");
}
gboolean add_bin_capture_to_pipe(gstData *data)
{
    if ((gst_bin_add(GST_BIN(data->pipeline), data->bin_capture)) != TRUE)
    {
        g_print("bin_capture not added to pipeline\n");
    }

    if (gst_element_set_state(data->pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS)
    {
        return TRUE;
    }
    else
    {
        cout << "Failed to set pipeline state to NULL." << endl;
        return FALSE;
    }
}
gboolean remove_bin_capture_from_pipe(gstData *data)
{
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_element_set_state(data->bin_capture, GST_STATE_NULL);
    if ((gst_bin_remove(GST_BIN(data->pipeline), data->bin_capture)) != TRUE)
    {
        g_print("bin_capture not removed from pipeline\n");
    }
    return TRUE;
}
gboolean start_capture_pipe(gstData *data)
{
    if (gst_element_set_state(data->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS)
    {
        return TRUE;
    }
    else
    {
        cout << "Failed to set pipeline state to PLAYING." << endl;
        return FALSE;
    }
}
gboolean stop_capture_pipe(gstData *data)
{
    gst_element_set_state(data->bin_capture, GST_STATE_NULL);
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    return TRUE;
}
gboolean deinit_video_live(gstData *data)
{
    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_element_set_state(data->bin_capture, GST_STATE_NULL);
    gst_object_unref(data->bin_capture);
    return TRUE;
}
gboolean check_bus_cb(gstData *data)
{
    GError *err = NULL;
    gchar *dbg = NULL;

    g_print("Got message: %s\n", GST_MESSAGE_TYPE_NAME(data->msg));
    switch (GST_MESSAGE_TYPE(data->msg))
    {
        case GST_MESSAGE_EOS:
            g_print("END OF STREAM... \n");
            break;

        case GST_MESSAGE_ERROR:
            gst_message_parse_error(data->msg, &err, &dbg);
            if (err)
            {
                g_printerr("ERROR: %s\n", err->message);
                g_error_free(err);
            }
            if (dbg)
            {
                g_printerr("[Debug details: %s]\n", dbg);
                g_free(dbg);
            }
            break;

        default:
            g_printerr("Unexpected message of type %d", GST_MESSAGE_TYPE(data->msg));
            break;
    }
    return TRUE;
}
void get_pipeline_bus(gstData *data)
{
    data->bus = gst_element_get_bus(data->pipeline);
    data->msg = gst_bus_poll(data->bus, (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR), -1);
    if (GST_MESSAGE_TYPE(data->msg))
    {
        check_bus_cb(data);
    }
    gst_message_unref(data->msg);
}
int main(int argc, char *argv[])
{
//Mat frame;
    VideoCapture capture;
    gstData gstreamerData;
    GstBuffer *gstImageBuffer;

    //XInitThreads();
    gst_init(&argc, &argv);
    create_pipeline(&gstreamerData);
    if (init_video_capture(&gstreamerData))
    {
        add_bin_capture_to_pipe(&gstreamerData);
        start_capture_pipe(&gstreamerData);
        //get_pipeline_bus(&gstreamerData);
        cout << "Starting while loop..." << endl;
        cvNamedWindow("Toradex Face Detection Demo with Gstreamer", 0);

        while (true)
        {
            //pthread_mutex_lock(&threadMutex);
            //pthread_cond_wait(&waitForGstBuffer, &threadMutex);
            gstImageBuffer = gst_app_sink_pull_buffer((GstAppSink*)gstreamerData.appsink);

            if (gstImageBuffer != NULL)
            {
                // A single 8-bit channel only maps the Y (luma) plane of the
                // I420 buffer, hence the black-and-white image. The image is
                // also never released, as noted above.
                frame = cvCreateImage(cvSize(720, 576), IPL_DEPTH_8U, 1);

                if (frame == NULL)
                {
                    g_printerr("IplImageFrame is null.\n");
                }
                else
                {
                    frame->imageData = (char*)GST_BUFFER_DATA(gstImageBuffer);
                    if (frame->imageData == NULL)
                    {
                        g_printerr("IplImage data is null.\n");
                    }
                    cvShowImage("Toradex Face Detection Demo with Gstreamer", frame);
                    cvWaitKey(1);
                    gst_buffer_unref(gstImageBuffer);
                }
            }
            else
            {
                cout << "Appsink buffer didn't return buffer." << endl;
            }
            /*
            if (frame)
            {
                cvShowImage("Toradex Face Detection Demo with Gstreamer", frame);
            }
            gst_buffer_unref(buffer);
            buffer = NULL;
            pthread_mutex_unlock(&threadMutex);
            cvWaitKey(1);*/
        }
    }
    else
    {
        exit(1);
    }

    //Destroy the window
    cvDestroyWindow("Toradex Face Detection Demo with Gstreamer");
    remove_bin_capture_from_pipe(&gstreamerData);
    deinit_video_live(&gstreamerData);
    delete_pipeline(&gstreamerData);

    return 0;
}
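In case you want to try this out, the code should build with something along these lines (pkg-config package names assumed for a GStreamer 0.10 / OpenCV 2.x system):

g++ main.cpp -o appsink-demo $(pkg-config --cflags --libs gstreamer-0.10 gstreamer-app-0.10 opencv) -lpthread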