Posts: 8 | Thanked: 1 time | Joined on May 2010
#27
Hi,

I think it would be highly useful if someone who has a working Qt camera example published it (that's the best way for beginners to learn). I'd really appreciate it as well.

I tried building a Qt camera program based on an example, which seems to have been taken offline.
Code:
#include <QtGui/QApplication>
#include <gst/gst.h>
#include "mainwindow.h"

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "main.h"

#include <QGraphicsScene>
#include <QGraphicsView>
#include <QTimer>

#include <gst/interfaces/xoverlay.h>

#include <stdlib.h>
#include "fast.h"

#define DEFAULT_VIDEOSINK "autovideosink"

#define IMGWIDTH 400
#define IMGHEIGHT 256

float *img;
byte *myimg;
int ret_num_corners, b=30;
xy* result;

GMainLoop *loop;

GstElement *pipeline, *camsource, *caps_yuv, *caps_rgb, *colorspace2, *colorspace, *xvsink;
GstBus *bus;

int rgb=1;

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}


SinkPipeline::SinkPipeline(QGraphicsView *parent) : QObject(parent)
{
  GstStateChangeReturn sret;

  // Create the gstreamer elements
  pipeline   = gst_pipeline_new("gst-test");
  camsource  = gst_element_factory_make("v4l2camsrc", NULL);      // N900 camera source
  caps_yuv   = gst_element_factory_make("capsfilter", NULL);
  caps_rgb   = gst_element_factory_make("capsfilter", NULL);
  colorspace = gst_element_factory_make("ffmpegcolorspace", NULL);
  xvsink     = gst_element_factory_make("xvimagesink", NULL);

  if (!(pipeline && camsource && caps_yuv && caps_rgb && colorspace && xvsink)) {
    g_printerr ("One element could not be created. Exiting.\n");
  }

  //  Set up the pipeline

  // Build the caps strings for the two colour formats
  // (bpp/depth only apply to video/x-raw-rgb, not to video/x-raw-yuv)
  char yuvcapsstr[256], rgbcapsstr[256];
  sprintf(yuvcapsstr, "video/x-raw-yuv,width=%d,height=%d,framerate=25/1", IMGWIDTH, IMGHEIGHT);
  sprintf(rgbcapsstr, "video/x-raw-rgb,width=%d,height=%d,bpp=32,depth=24,framerate=25/1", IMGWIDTH, IMGHEIGHT);

  g_object_set(G_OBJECT(caps_yuv), "caps", gst_caps_from_string(yuvcapsstr), NULL);
  g_object_set(G_OBJECT(caps_rgb), "caps", gst_caps_from_string(rgbcapsstr), NULL);

  // we add a message handler
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  if(rgb){
    // src pad of the RGB capsfilter (this is where a buffer probe would be attached)
    GstPad *pad = gst_element_get_pad(caps_rgb, "src");
    gst_object_unref(pad);
    g_print("RGB\n");
  }else{
    // src pad of the YUV capsfilter (this is where a buffer probe would be attached)
    GstPad *pad = gst_element_get_pad(caps_yuv, "src");
    gst_object_unref(pad);
    g_print("YUV\n");
  }

  // Create the test source (the pipeline itself was already created above)
  src = gst_element_factory_make ("videotestsrc", NULL);

  if ((sink = gst_element_factory_make ("xvimagesink", NULL))) {
    sret = gst_element_set_state (sink, GST_STATE_READY);
    if (sret != GST_STATE_CHANGE_SUCCESS) {
      gst_element_set_state (sink, GST_STATE_NULL);
      gst_object_unref (sink);

      if ((sink = gst_element_factory_make ("ximagesink", NULL))) {
        sret = gst_element_set_state (sink, GST_STATE_READY);
        if (sret != GST_STATE_CHANGE_SUCCESS) {
          gst_element_set_state (sink, GST_STATE_NULL);
          gst_object_unref (sink);

          if (strcmp (DEFAULT_VIDEOSINK, "xvimagesink") != 0 &&
              strcmp (DEFAULT_VIDEOSINK, "ximagesink") != 0) {

            if ((sink = gst_element_factory_make (DEFAULT_VIDEOSINK, NULL))) {
              if (!GST_IS_BIN (sink)) {
                sret = gst_element_set_state (sink, GST_STATE_READY);
                if (sret != GST_STATE_CHANGE_SUCCESS) {
                  gst_element_set_state (sink, GST_STATE_NULL);
                  gst_object_unref (sink);
                  sink = NULL;
                }
              } else {
                gst_object_unref (sink);
                sink = NULL;
              }
            }
          }
        }
      }
    }
  }

  if (sink == NULL)
    g_error ("Couldn't find a working video sink.");

  gst_bin_add_many (GST_BIN (pipeline), src, sink, caps_rgb, caps_yuv, colorspace, NULL);
  gst_element_link_many (src, colorspace, caps_rgb, sink, NULL);

  xwinid = parent->winId();
}

SinkPipeline::~SinkPipeline()
{
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
}

// Essentially just sets the pipeline to the PLAYING state
void SinkPipeline::startPipeline()
{
  GstStateChangeReturn sret;

  /* we know what the video sink is in this case (xvimagesink), so we can
   * just set it directly here now (instead of waiting for a prepare-xwindow-id
   * element message in a sync bus handler and setting it there)*/

  gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (sink), xwinid);

  sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (sret == GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    // Exit the application
    QTimer::singleShot(0, qApp, SLOT(quit()));
  }

  // Don't sync the sink to the clock, so frames are shown as soon as they arrive
  g_object_set(G_OBJECT(sink), "sync", FALSE, NULL);
}


int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    MainWindow w;

    QGraphicsScene scene;
    scene.setSceneRect(-100.0, -100.0, 200.0, 200.0);

    QGraphicsView graphicsView (&scene);
    graphicsView.resize(400,256);//800,480
    graphicsView.setWindowTitle("Fancy application");
    graphicsView.show();

    img = (float*)malloc(sizeof(float)*IMGWIDTH*IMGHEIGHT);
    myimg = (byte*)malloc(sizeof(byte)*IMGWIDTH*IMGHEIGHT);

    loop = g_main_loop_new (NULL, FALSE);

    // Initialisation
    gst_init (&argc, &argv); //init gstreamer
    SinkPipeline sinkpipe(&graphicsView);
    sinkpipe.startPipeline();

    // Iterate
    g_print("Running...\n");
    g_main_loop_run(loop);

    // Out of the main loop, clean up nicely
    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);

    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    free(img);
    free(myimg);
    return 0;
}
Of course, one has to add
Code:
INCLUDEPATH += /usr/include/gstreamer-0.10 /usr/include/glib-2.0 /usr/lib/glib-2.0/include /usr/include/libxml2
LIBS          += -lgstreamer-0.10 -lgobject-2.0 -lgmodule-2.0 -lgthread-2.0 -lrt -lxml2 -lglib-2.0 -lgstinterfaces-0.10
to the .pro file to compile this source.
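Instead of hard-coding those paths, letting qmake pull the flags from pkg-config should also work. I haven't verified the exact package names on the device, so treat this as a sketch:
Code:
CONFIG    += link_pkgconfig
PKGCONFIG += gstreamer-0.10 gstreamer-interfaces-0.10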

The funny thing is that it works fine with the test source, but only shows a white screen when I use the actual camera source. Is there any way to fix this?
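My next debugging step will probably be to strip the camera path down to the bare minimum and let the elements negotiate the caps themselves. Roughly like this (an untested sketch, variable names made up for illustration; it reuses the window id from the constructor, and the strict capsfilter can be added back once a picture shows up):
Code:
/* Minimal camera path, sketched from the elements above. Untested in this
 * exact form: no strict capsfilter, so ffmpegcolorspace and xvimagesink
 * are free to negotiate whatever the camera actually offers. */
GstElement *campipe = gst_pipeline_new ("cam-test");
GstElement *cam     = gst_element_factory_make ("v4l2camsrc", NULL);
GstElement *csp     = gst_element_factory_make ("ffmpegcolorspace", NULL);
GstElement *vsink   = gst_element_factory_make ("xvimagesink", NULL);

gst_bin_add_many (GST_BIN (campipe), cam, csp, vsink, NULL);
gst_element_link_many (cam, csp, vsink, NULL);

/* render into the existing QGraphicsView window, as in startPipeline() */
gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (vsink), xwinid);
gst_element_set_state (campipe, GST_STATE_PLAYING);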

Btw, tpaixao: /dev/video0 is the main cam and /dev/video1 the front cam. I just don't know how to select it in GStreamer yet, but maybe it helps.
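If v4l2camsrc exposes the same "device" property as the stock v4l2src (I haven't checked that it does), selecting the front cam should just be:
Code:
/* Assumption: v4l2camsrc has a "device" property like v4l2src does. */
g_object_set (G_OBJECT (camsource), "device", "/dev/video1", NULL);  /* front cam */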

Thanks in advance.
 
