Feed an OpenCV Mat into a DeepStream pipeline

I would like to open a video stream with OpenCV and push it frame by frame into a DeepStream pipeline, so that I can run TensorRT inference with a YOLOv3 model, but I can't get it to work.

I tried following the directions I found here, but still nothing works...

This is my code:

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <cstring>
#include <opencv2/core/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>

static GMainLoop *loop;

static void
cb_need_data (GstElement *appsrc,
          guint       unused_size,
          gpointer    user_data)
{
  static GstClockTime timestamp = 0;
  guint size, height, width, channels;
  GstFlowReturn ret;
  GstMapInfo map;

  /* read the frame and convert BGR (OpenCV's default) to RGB
     so the data matches the appsrc caps declared in main() */
  cv::Mat imgMat = cv::imread ("cat.jpg", cv::IMREAD_COLOR);
  cv::cvtColor (imgMat, imgMat, cv::COLOR_BGR2RGB);

  height   = imgMat.rows;
  width    = imgMat.cols;
  channels = imgMat.channels ();
  guchar *data1 = (guchar *) imgMat.data;
  size = height * width * channels;

  g_print ("frame_height: %d\n", height);
  g_print ("frame_width: %d\n", width);
  g_print ("frame_channels: %d\n", channels);
  g_print ("frame_size: %d\n", size);

  GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  memcpy (map.data, data1, size);
  gst_buffer_unmap (buffer, &map);

  GST_BUFFER_PTS (buffer) = timestamp;
  GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);

  timestamp += GST_BUFFER_DURATION (buffer);
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  /* the push-buffer action signal does not take ownership of the buffer */
  gst_buffer_unref (buffer);

  if (ret != GST_FLOW_OK) {
    /* something went wrong, stop pushing */
    g_print ("quit");
    g_main_loop_quit (loop);
  }
}

gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline, *appsrc, *conv, *streammux, *sink;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* setup pipeline */
  pipeline = gst_pipeline_new ("pipeline");
  appsrc = gst_element_factory_make ("appsrc", "source");
  conv = gst_element_factory_make ("videoconvert", "conv");
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
  sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");

  /* setup */
  g_object_set (G_OBJECT (appsrc), "caps",
        gst_caps_new_simple ("video/x-raw",
                     "format", G_TYPE_STRING, "RGB",
                     "width", G_TYPE_INT, 640,
                     "height", G_TYPE_INT, 360,
                     "framerate", GST_TYPE_FRACTION, 1, 1,
                     NULL), NULL);

  gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, streammux, sink, NULL);
  gst_element_link_many (appsrc, conv, streammux, sink, NULL);

  /* setup appsrc */
  g_object_set (G_OBJECT (appsrc),
        "stream-type", 0,
        "format", GST_FORMAT_TIME, NULL);
  g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* clean up */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  g_main_loop_unref (loop);

  return 0;
}

I'm an absolute beginner, so it would be much better if someone could show me the code.

Thanks.

1 Answer

You need to create a pipeline like this:

      appsrc ! nvvideoconvert ! nvstreammux ! nvinfer ! nvvideoconvert ! nvdsosd ! nveglglessink

"appsrc" принимает ваш фрейм в качестве входных данных

"nvvideoconvert" выполняет преобразование формата

"nvstreammux" мультиплексирует потоки в случае нескольких источников

"nvinfer" делает вывод о входном потоке

"nvvideoconvert" теперь конвертирует кадр в RGBA

"nvdsosd" рисует ограничивающие рамки на фрейме.

"nveglglessink" отображает рамку

    #include <gst/gst.h>
    #include <gst/app/gstappsrc.h>
    #include <gst/app/gstappsink.h>
    #include <cstring>
    #include <string>
    #include <opencv2/core/core.hpp>
    #include <opencv2/imgcodecs.hpp>
    #include <opencv2/imgproc/imgproc.hpp>
    #include <opencv2/highgui/highgui.hpp>

    static GMainLoop *loop;

    #define APPSRC_WIDTH 320
    #define APPSRC_HEIGHT 240

    #define RUN_VIDEO 0
    
    static void
    cb_need_data (GstElement *appsrc,
              guint       unused_size,
              gpointer    user_data)
    {
      static GstClockTime timestamp = 0;
      guint size, height, width, channels;
      GstFlowReturn ret;
      GstMapInfo map;

      /* read a sample frame, scale it to the appsrc resolution and convert
         BGR (OpenCV's default) to RGBA to match the appsrc caps */
      cv::Mat imgMat = cv::imread ("/opt/nvidia/deepstream/deepstream-4.0/samples/streams/sample_720p.jpg", cv::IMREAD_COLOR);
      cv::resize (imgMat, imgMat, cv::Size (APPSRC_WIDTH, APPSRC_HEIGHT));
      cv::cvtColor (imgMat, imgMat, cv::COLOR_BGR2RGBA);

      height   = imgMat.rows;
      width    = imgMat.cols;
      channels = imgMat.channels ();
      guchar *data1 = (guchar *) imgMat.data;
      size = height * width * channels;
    
      g_print ("frame_height: %d\n", height);
      g_print ("frame_width: %d\n", width);
      g_print ("frame_channels: %d\n", channels);
      g_print ("frame_size: %d\n", size);

      GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
      gst_buffer_map (buffer, &map, GST_MAP_WRITE);
      memcpy (map.data, data1, size);
      gst_buffer_unmap (buffer, &map);
    
      GST_BUFFER_PTS (buffer) = timestamp;
      GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 1);
    
      timestamp += GST_BUFFER_DURATION (buffer);
      g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
      /* the push-buffer action signal does not take ownership of the buffer */
      gst_buffer_unref (buffer);

      if (ret != GST_FLOW_OK) {
        /* something went wrong, stop pushing */
        g_print ("quit");
        g_main_loop_quit (loop);
      }
    }
    
    gint
    main (gint   argc,
          gchar *argv[])
    {
      GstElement *pipeline, *appsrc, *conv, *capsfilter_converter, *streammux, *nvinfer, *nvconv, *nvosd, *sink;
      GstElement *filesrc, *parser, *decoder;
      GstCaps *nvconvert_caps;
    
      /* init GStreamer */
      gst_init (&argc, &argv);
      loop = g_main_loop_new (NULL, FALSE);
    
    
      /* setup pipeline */
      pipeline = gst_pipeline_new ("pipeline");
      appsrc = gst_element_factory_make ("appsrc", "source");
      filesrc = gst_element_factory_make ("filesrc", "file-source");
      parser = gst_element_factory_make ("h264parse", "parser");
      decoder = gst_element_factory_make ("nvv4l2decoder", "decoder");
      conv = gst_element_factory_make ("nvvideoconvert", "nv-conv-1");
      capsfilter_converter = gst_element_factory_make ("capsfilter",    "converter-caps");
      streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");
      nvinfer = gst_element_factory_make ("nvinfer", "nv-infer");
      nvconv = gst_element_factory_make ("nvvideoconvert", "nv-conv-2");
      nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");
      sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
    
      /* setup */
      g_object_set (G_OBJECT (appsrc), "caps",
            gst_caps_new_simple ("video/x-raw",
                         "format", G_TYPE_STRING, "RGBA",
                         "width", G_TYPE_INT, APPSRC_WIDTH,
                         "height", G_TYPE_INT, APPSRC_HEIGHT,
                         "framerate", GST_TYPE_FRACTION, 1, 1,
                         NULL), NULL);
    
      /* nvstreammux expects buffers in NVMM (device) memory, so force the
         NVMM caps feature on the converter's output */
      nvconvert_caps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "RGBA", NULL);
      GstCapsFeatures *feature = gst_caps_features_new ("memory:NVMM", NULL);
      gst_caps_set_features (nvconvert_caps, 0, feature);
      g_object_set (G_OBJECT (capsfilter_converter), "caps", nvconvert_caps, NULL);
    
      g_object_set (G_OBJECT (streammux), "width", APPSRC_WIDTH, "height",
              APPSRC_HEIGHT, "batch-size", 1,
          "batched-push-timeout", 5000, NULL);
      g_object_set (G_OBJECT (conv),
              "nvbuf-memory-type", 0,
              "num-surfaces-per-frame", 1,
              NULL);
      g_object_set (G_OBJECT (streammux),
              "nvbuf-memory-type", 0,
              "num-surfaces-per-frame", 1,
              NULL);
      g_object_set (G_OBJECT (filesrc), "location",
              "/opt/nvidia/deepstream/deepstream-4.0/samples/streams/sample_720p.h264", NULL);
    
      std::string config_file_path_FR = "/opt/nvidia/deepstream/deepstream-4.0/samples/configs/deepstream-app/config_infer_primary.txt";
    
      g_object_set (G_OBJECT (nvinfer),
          "config-file-path", config_file_path_FR.c_str(), NULL);
    
    #if RUN_VIDEO
      gst_bin_add_many (GST_BIN (pipeline), filesrc, parser, decoder, conv,streammux, nvinfer, nvosd, nvconv, sink,NULL);
    #else
      gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, capsfilter_converter, streammux, nvinfer, nvosd, nvconv, sink,NULL);
    #endif
      GstPad *sinkpad, *srcpad;
      gchar pad_name[16] = { };
    
      g_snprintf (pad_name, 15, "sink_%u", 0);
      sinkpad = gst_element_get_request_pad (streammux, pad_name);
      if (!sinkpad) {
        g_printerr ("Streammux request sink pad failed. Exiting.\n");
        return -1;
      }
    #if RUN_VIDEO
      srcpad = gst_element_get_static_pad (decoder, "src");
    #else
      srcpad = gst_element_get_static_pad (capsfilter_converter, "src");
    #endif
      if (!srcpad) {
        g_printerr ("Failed to get src pad of source bin. Exiting.\n");
        return -1;
      }
    
      if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
        g_printerr ("Failed to link source bin to stream muxer. Exiting.\n");
        return -1;
      }
    
    #if RUN_VIDEO
      gst_element_link_many (filesrc, parser, decoder, NULL);
    #else
      gst_element_link_many (appsrc,conv, capsfilter_converter, NULL);
    #endif
      gst_element_link_many (streammux, nvinfer, nvconv, nvosd, sink ,NULL);
    
    
      g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data), NULL);
    
      /* play */
      gst_element_set_state (pipeline, GST_STATE_PLAYING);
      g_main_loop_run (loop);
    
      /* clean up */
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (GST_OBJECT (pipeline));
      g_main_loop_unref (loop);
    
      return 0;
    }
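
One thing the listing above omits is a GStreamer bus watch; while debugging it is worth adding one so that pipeline errors are printed instead of the pipeline stalling silently. A minimal sketch, using standard GStreamer boilerplate:

    static gboolean
    bus_call (GstBus *bus, GstMessage *msg, gpointer data)
    {
      GMainLoop *main_loop = (GMainLoop *) data;
      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
          g_print ("End of stream\n");
          g_main_loop_quit (main_loop);
          break;
        case GST_MESSAGE_ERROR: {
          GError *err = NULL;
          gchar *dbg = NULL;
          gst_message_parse_error (msg, &err, &dbg);
          g_printerr ("Error: %s (%s)\n", err->message, dbg ? dbg : "no debug info");
          g_error_free (err);
          g_free (dbg);
          g_main_loop_quit (main_loop);
          break;
        }
        default:
          break;
      }
      return TRUE;
    }

    /* in main (), after creating the pipeline: */
    GstBus *bus = gst_element_get_bus (pipeline);
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);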

To run inference with your own model, you need to point config-file-path at your model's configuration file, and set the image/video path to the input you want to run inference on.
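
An nvinfer configuration file is a plain INI-style text file. As a rough sketch, a YOLOv3 config along the lines of the objectDetector_Yolo sample shipped with DeepStream typically looks like the following (every path here is a placeholder you must adapt to your setup):

    [property]
    gpu-id=0
    # pixel normalization: 1/255
    net-scale-factor=0.0039215697906911373
    # placeholder paths - point these at your own files
    custom-network-config=yolov3.cfg
    model-file=yolov3.weights
    labelfile-path=labels.txt
    batch-size=1
    # 0=FP32, 1=INT8, 2=FP16
    network-mode=2
    num-detected-classes=80
    gie-unique-id=1
    # YOLO needs a custom bbox parser, provided by the objectDetector_Yolo sample
    parse-bbox-func-name=NvDsInferParseCustomYoloV3
    custom-lib-path=nvdsinfer_custom_impl_Yolo/libnvdsinfer_custom_impl_Yolo.so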

To run this on an h264-encoded video, just change #define RUN_VIDEO 0 to #define RUN_VIDEO 1.
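
Finally, since the original question was about pushing frames from an OpenCV video stream rather than a single image: below is a minimal sketch of what the need-data callback could look like with cv::VideoCapture. The source path is a placeholder, and the 30 fps buffer duration is an assumption; match both to your actual stream.

    static cv::VideoCapture cap ("video.mp4");  /* placeholder source path */

    static void
    cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
    {
      static GstClockTime timestamp = 0;
      cv::Mat frame;

      if (!cap.read (frame)) {
        /* end of the stream: tell appsrc so the pipeline can finish cleanly */
        gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
        return;
      }

      cv::resize (frame, frame, cv::Size (APPSRC_WIDTH, APPSRC_HEIGHT));
      cv::cvtColor (frame, frame, cv::COLOR_BGR2RGBA);  /* match the RGBA caps */

      gsize size = frame.total () * frame.elemSize ();
      GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
      gst_buffer_fill (buffer, 0, frame.data, size);

      GST_BUFFER_PTS (buffer) = timestamp;
      GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 30);
      timestamp += GST_BUFFER_DURATION (buffer);

      GstFlowReturn ret;
      g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
      gst_buffer_unref (buffer);

      if (ret != GST_FLOW_OK)
        g_main_loop_quit (loop);
    }

If you use this, remember to also change the framerate in the appsrc caps from 1/1 to 30/1 so the timestamps stay consistent with the caps.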
