Hello all

My name is Corentin and I'm working on a web multimedia project based on the Django framework and GStreamer, and I want to add a scene-change-detection functionality to my project.

To do this I need access to the raw video data, so I took a (long...) look at the GStreamer manual and wrote this code. It is based on the Hello World example that reads an Ogg audio file (Chapter 10 in the manual), which I easily adapted to play an Ogg video file, combined with the code from the data-probing example (Chapter 18 in the manual).

so here is my code :

#include <gst/gst.h>

* Global objects are usually a bad thing. For the purpose of this
* example, we will use them, however.

GstElement *pipeline, *source, *parser, *decoder, *csp, *filter, *sink;

/* Bus watch callback: reacts to EOS and ERROR messages on the pipeline
 * bus and quits the main loop in both cases.
 *
 * The pasted version had lost the braces and the `case` labels of the
 * switch; they are restored here to match the manual's hello-world
 * example this code is based on.
 *
 * Returns TRUE so the watch stays installed. */
static gboolean
bus_call (GstBus     *bus,
          GstMessage *msg,
          gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End-of-stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *err;

      gst_message_parse_error (msg, &err, &debug);
      g_free (debug);
      g_print ("Error: %s\n", err->message);
      g_error_free (err);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}

/* Buffer probe: called for every buffer flowing through the probed pad.
 * This function mirrors each line of the frame horizontally (it should
 * invert the video left-to-right).
 *
 * Brace structure restored from the manual's Chapter 18 example:
 * `data += 384` advances to the next line, so it belongs AFTER the
 * inner loop but INSIDE the outer one — in the pasted version it had
 * slid into the inner loop, which would corrupt the image and run past
 * the buffer.
 *
 * NOTE(review): the geometry is hard-coded to 384x288 16-bit pixels
 * (as in the manual), but main() forces caps of width=288, height=208.
 * If the negotiated frame really is 288x208, this loop writes past the
 * end of the buffer — these constants must match the caps. TODO confirm
 * against the actual test video.
 *
 * Returns TRUE so the buffer is passed downstream. */
static gboolean
cb_have_data (GstPad    *pad,
              GstBuffer *buffer,
              gpointer   u_data)
{
  g_print ("Youpi, you are in the loop of the frame \n");
  gint x, y;
  guint16 *data = (guint16 *) GST_BUFFER_DATA (buffer), t;

  /* invert data: swap pixel x with pixel (width-1-x) on each line */
  for (y = 0; y < 288; y++) {
    for (x = 0; x < 384 / 2; x++) {
      t = data[384 - 1 - x];
      data[384 - 1 - x] = data[x];
      data[x] = t;
    }
    data += 384;            /* advance one full line (in guint16 units) */
  }

  return TRUE;
}

/* "pad-added" handler: oggdemux creates its source pads dynamically,
 * one per elementary stream, so the demuxer-to-decoder link must be
 * made here rather than at pipeline-construction time.
 *
 * NOTE(review): an Ogg file with both audio and video makes oggdemux
 * emit a vorbis pad AND a theora pad; linking the vorbis pad to the
 * theora decoder will fail. gst_pad_link's return value should be
 * checked (and the pad's caps inspected) to skip non-video streams. */
static void
new_pad (GstElement *element,
         GstPad     *pad,
         gpointer    data)
{
  GstPad *sinkpad;

  /* We can now link this pad with the decoder's always-present sink pad */
  g_print ("Dynamic pad created, linking parser/decoder\n");
  sinkpad = gst_element_get_pad (decoder, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

main (int argc,
char *argv[])

GMainLoop *loop;
GstBus *bus;
GstCaps *filtercaps;
GstPad *pad2;

/* initialize GStreamer */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);

/* check input arguments */
if (argc != 2) {
g_print ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;


/* create elements */
pipeline = gst_pipeline_new ("player");
source = gst_element_factory_make ("filesrc", "file-source");
parser = gst_element_factory_make ("oggdemux", "ogg-parser");
decoder = gst_element_factory_make ("theoradec", "theora-decoder");
filter = gst_element_factory_make ("capsfilter", "filter");
csp = gst_element_factory_make ("ffmpegcolorspace", "csp");
g_assert (filter != NULL); /* should always exist */

sink = gst_element_factory_make ("ximagesink", "sink");
if (!pipeline || !source || !parser || !decoder || !csp || !filter ||!sink) {
g_print ("One element could not be created\n");

return -1;

/* set filename property on the file source. Also add a message
* handler. */
g_object_set (G_OBJECT (source), "location", argv[1], NULL);

bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);

/* put all elements in a bin */
gst_bin_add_many (GST_BIN (pipeline),
source, parser, decoder, csp,filter, sink, NULL);

/* link together - note that we cannot link the parser and
* decoder yet, becuse the parser uses dynamic pads. For that,
* we set a pad-added signal handler. */
gst_element_link (source, parser);
gst_element_link_many (decoder, csp , filter, sink, NULL);
g_signal_connect (parser, "pad-added", G_CALLBACK (new_pad), NULL);

// create the filter caps vvith the video caracteristics

filtercaps = gst_caps_new_simple ("video/x-raw-rgb",
"width", G_TYPE_INT, 288,
"height", G_TYPE_INT, 208,
"framerate", GST_TYPE_FRACTION, 30, 1,
//"bpp", G_TYPE_INT, 16,
//"depth", G_TYPE_INT, 16,
// "endianness", G_TYPE_INT, G_BYTE_ORDER,

g_print ("try to set the caps on the filter\n");
g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);

g_print ("try to get the pad\n");
pad2 = gst_element_get_pad (csp,"csp");
if ( pad2 == NULL )
g_print ("could not get the pad\n");

//pad2 = gst_element_get_pad (csp, "csp");

//add the probe
g_print ("\n setting the probe");
gst_pad_add_buffer_probe (pad2, G_CALLBACK (cb_have_data), NULL);
g_print ("\nprobe added\n");
gst_object_unref (pad2);

/* Now set to playing and iterate. */
g_print ("Setting to PLAYING\n");
gst_element_set_state (pipeline, GST_STATE_PLAYING);
g_print ("Running\n");
g_main_loop_run (loop);

/* clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));

return 0;


To my mind the error comes from getting the pad. I looked into this and tried both of these functions:

gst_element_get_static_pad() & gst_element_get_request_pad().

The fact is, I placed the filter after the csp (ffmpegcolorspace) element because I think that is where I will find the video data.
I tried placing the filter before it instead, but nothing changed.
I can't find my error.

If any of you knows where I'm going wrong, it would be very nice to tell me, because I'm getting mad.

You'll find the little video I use to test my code at:
(you'll need it if you try my code, because some video properties are hard-coded!) : )

Thank you in advance,