Pushing images into a GStreamer pipeline


Solution 1

GStreamer uses plugins to do everything. Plugins that create data or take it from an external source are called "src" plugins.

The generic src plugin for injecting application-generated data into a pipeline is called appsrc. The API provided by appsrc is documented as part of the App Library.
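In outline, the application creates buffers itself, timestamps them, and hands them to appsrc, which pushes them into the rest of the pipeline. A minimal, untested sketch against the 0.10 API used elsewhere on this page (the function name, the 640x480 frame size, and the 25 fps rate are only placeholders):

#include <string.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

/* Sketch: copy one generated 640x480 RGB24 frame into a GstBuffer and
 * hand it to appsrc.  The element is assumed to already have matching
 * video/x-raw-rgb caps set on it. */
static GstFlowReturn
push_one_frame (GstAppSrc *appsrc, const guint8 *pixels, guint64 frame_number)
{
    guint size = 640 * 480 * 3;   /* 24 bpp -> 3 bytes per pixel */
    GstBuffer *buffer = gst_buffer_new_and_alloc (size);

    memcpy (GST_BUFFER_DATA (buffer), pixels, size);
    /* timestamp the frame so downstream elements see a 25 fps stream */
    GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (frame_number, GST_SECOND, 25);

    /* gst_app_src_push_buffer() takes ownership of the buffer */
    return gst_app_src_push_buffer (appsrc, buffer);
}

Note that gst_app_src_push_buffer() takes ownership of the buffer, while the "push-buffer" action signal used in Solution 2 below does not, which is why that code unrefs the buffer after emitting the signal.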

Here's one example that demonstrates feeding appsrc with generated images: gdk-gstappsrc-stream.c. It seems to be derived from some of the test code in the GStreamer source tree.

Another approach would be to create your own src plugin. Look at the goom music visualization plugin for an example that seems to work in a way similar to what you have specified.

Solution 2

I found a possible solution to this (I grab the images with OpenCV), but I get an error from the pipeline:

ERROR from element mysource: Internal data flow error. Debugging info: gstbasesrc.c(2574): gst_base_src_loop (): /GstPipeline:pipeline0/GstAppSrc:mysource: streaming task paused, reason not-negotiated (-4)

This is the code:

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <opencv/cv.h>
#include <opencv/highgui.h>

typedef struct _App App;
struct _App{
    GstElement *pipeline;
    GstElement *appsrc;

    GMainLoop *loop;
    guint sourceid;
    GTimer *timer;
};

App s_app;
CvCapture *capture;
static gboolean read_data(App *app){
    GstFlowReturn ret;
    GstBuffer *buffer = gst_buffer_new();
    IplImage *frame = cvQueryFrame(capture);

    /* wrap the OpenCV frame data; 24 bpp BGR means 3 bytes per pixel */
    GST_BUFFER_DATA(buffer) = (uchar*)frame->imageData;
    GST_BUFFER_SIZE(buffer) = frame->width * frame->height * 3;

    g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);
    gst_buffer_unref(buffer);
    if(ret != GST_FLOW_OK){
        GST_DEBUG("Error feeding buffer");
        return FALSE;
    }
    return TRUE;
}

static void start_feed(GstElement* pipeline, guint size, App* app){
    if(app->sourceid == 0){
        GST_DEBUG("Start feeding");
        app->sourceid = g_idle_add((GSourceFunc) read_data, app);
    }
}

static void stop_feed(GstElement* pipeline, App* app){
    if(app->sourceid != 0){
        GST_DEBUG("Stop feeding");
        g_source_remove(app->sourceid);
        app->sourceid = 0;
    }
}

static gboolean
bus_message (GstBus * bus, GstMessage * message, App * app)
{
    GST_DEBUG ("got message %s",
        gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_ERROR: {
            GError *err = NULL;
            gchar *dbg_info = NULL;

            gst_message_parse_error (message, &err, &dbg_info);
            g_printerr ("ERROR from element %s: %s\n",
                GST_OBJECT_NAME (message->src), err->message);
            g_printerr ("Debugging info: %s\n", (dbg_info) ? dbg_info : "none");
            g_error_free (err);
            g_free (dbg_info);
            g_main_loop_quit (app->loop);
            break;
        }
        case GST_MESSAGE_EOS:
            g_main_loop_quit (app->loop);
            break;
        default:
            break;
    }
    return TRUE;
}

int main(int argc, char* argv[]){
    App *app = &s_app;
    GError *error = NULL;
    GstBus *bus;
    GstCaps *caps;

    capture = cvCaptureFromCAM(0);
    gst_init(&argc, &argv);

    /* create a mainloop to get messages and to handle the idle handler that will
     * feed data to appsrc. */
    app->loop = g_main_loop_new (NULL, TRUE);
    app->timer = g_timer_new();

    app->pipeline = gst_parse_launch("appsrc name=mysource ! video/x-raw-rgb,width=640,height=480,bpp=24,depth=24 ! ffmpegcolorspace ! videoscale method=1 ! theoraenc bitrate=150 ! tcpserversink host=127.0.0.1 port=5000", NULL);
    g_assert (app->pipeline);
    bus = gst_pipeline_get_bus (GST_PIPELINE (app->pipeline));
    g_assert(bus);

    /* add watch for messages */
    gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);

    /* get the appsrc */
    app->appsrc = gst_bin_get_by_name (GST_BIN(app->pipeline), "mysource");
    g_assert(app->appsrc);
    g_assert(GST_IS_APP_SRC(app->appsrc));
    g_signal_connect (app->appsrc, "need-data", G_CALLBACK (start_feed), app);
    g_signal_connect (app->appsrc, "enough-data", G_CALLBACK (stop_feed), app);

    /* set the caps on the source */
    caps = gst_caps_new_simple ("video/x-raw-rgb",
        "bpp", G_TYPE_INT, 24,
        "depth", G_TYPE_INT, 24,
        "width", G_TYPE_INT, 640,
        "height", G_TYPE_INT, 480,
        NULL);
    gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps);

    /* go to playing and wait in a mainloop */
    gst_element_set_state (app->pipeline, GST_STATE_PLAYING);

    /* this mainloop is stopped when we receive an error or EOS */
    g_main_loop_run (app->loop);

    GST_DEBUG ("stopping");
    gst_element_set_state (app->pipeline, GST_STATE_NULL);
    gst_object_unref (bus);
    g_main_loop_unref (app->loop);
    cvReleaseCapture(&capture);
    return 0;
}

Any ideas?
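Not a verified fix for the error above, but one thing to check for a not-negotiated failure: fixed video/x-raw-rgb caps in 0.10 normally carry a framerate, endianness, and channel masks in addition to the geometry. A sketch of the same gst_caps_new_simple() call from the code above with those extra fields filled in (the values are only illustrative; the masks assume OpenCV-style BGR byte order):

caps = gst_caps_new_simple ("video/x-raw-rgb",
    "bpp", G_TYPE_INT, 24,
    "depth", G_TYPE_INT, 24,
    "width", G_TYPE_INT, 640,
    "height", G_TYPE_INT, 480,
    "framerate", GST_TYPE_FRACTION, 25, 1,
    "endianness", G_TYPE_INT, G_BIG_ENDIAN,
    /* masks for BGR byte order as produced by OpenCV */
    "red_mask", G_TYPE_INT, 0x0000ff,
    "green_mask", G_TYPE_INT, 0x00ff00,
    "blue_mask", G_TYPE_INT, 0xff0000,
    NULL);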

Solution 3

You might try hacking imagefreeze to do what you want. appsrc might also do it.
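For reference, stock imagefreeze turns a single decoded picture into an endless video stream, so out of the box it covers the one-still-image case rather than a sequence of generated frames. An untested gst-launch-0.10 sketch (the file name, framerate, and output path are placeholders, and the pipeline keeps running until you stop it):

gst-launch-0.10 filesrc location=still.png ! pngdec ! imagefreeze ! video/x-raw-rgb,framerate=25/1 ! ffmpegcolorspace ! ffenc_flv ! flvmux ! filesink location=desktop.flv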

Comments

  • Hoaitri Ho:

    After playing around with some toy applications, exploring the documentation, and googling around (including the mailing list archives), I am still puzzled by what I would think is a rather common use case.

    I have existing code that generates images (in memory), and I would like to push these images into a GStreamer pipeline (to create an FLV video at the end).

    I could not find an "obvious way to do it". My best guess is to dig into the source code of GstMultiFileSrc and its parent GstPushSrc to figure it out.

    Could any of you point me to the "obvious way" of doing this? Is there any related documentation/tutorial/example on this?

    Once I have the input right, the rest is a piece of cake, thanks to GStreamer's awesomeness! (Something like "my magic input -> ffmpegcolorspace ! ffenc_flv ! flvmux ! filesink location=desktop.flv"; a sketch of that pipeline with appsrc as the input appears below.)

    Thanks for your answers.
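A sketch of the pipeline string above with appsrc as the input (untested; the caps values are placeholders and must match the generated images):

appsrc name=mysource ! video/x-raw-rgb,bpp=24,depth=24,width=640,height=480,framerate=25/1 ! ffmpegcolorspace ! ffenc_flv ! flvmux ! filesink location=desktop.flv

The application would set the same caps on the appsrc element and push timestamped buffers into it, as in the sketch under Solution 1.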