gstreamer 1.0 rtspsrc 到 rtph264depay 无法链接

标签 gstreamer rtsp

我正在尝试使用 gstreamer 使用以下命令连接到 ubiquiti 相机并成功连接。

gst-launch-1.0 --gst-debug=4 rtspsrc location="rtsp://<ip>:554/live/ch00_0" ! rtph264depay ! h264parse ! openh264dec ! d3dvideosink

我查看了调试,它说无法将 pads rtspsrc 链接到 rtph264depay。
(图片:调试日志输出截图)

但命令行方式可以正常播放并且可以看到视频。而当我把它放到一个 C 项目中时,它提示无法将 source 链接到 rtph264parse。我查阅资料后发现需要使用动态 pad(dynamic pad),代码如下

/* "pad-added" handler: when the upstream element exposes a new source
 * pad, link it to the static "sink" pad of the element supplied via the
 * user-data pointer. The link result is not checked (best-effort). */
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstElement *downstream = (GstElement *) data;
    GstPad *sink_pad = NULL;

    g_print ("Dynamic pad created, linking source/demuxer\n");

    /* rtph264depay (and friends) expose exactly one always-sink pad. */
    sink_pad = gst_element_get_static_pad (downstream, "sink");
    gst_pad_link (pad, sink_pad);
    gst_object_unref (sink_pad);
}

/* The question's (failing) version: builds rtspsrc ! rtph264depay !
 * h264parse ! openh264dec ! d3dvideosink.
 * NOTE(review): `pipel` and `filtercaps` are globals declared outside
 * this snippet. */
int main(int argc, char *argv[])
{
   /* Initialize GStreamer */
   gst_init(&argc,&argv);

   /* Build Pipeline */ 
   pipel.pipeline = gst_pipeline_new("My pipeline");
   pipel.source = gst_element_factory_make ("rtspsrc","source");
   g_object_set (G_OBJECT (pipel.source), "latency",2000,NULL);
   pipel.rtppay = gst_element_factory_make( "rtph264depay", "depayl");
   pipel.parse = gst_element_factory_make("h264parse","parse");
   /* NOTE(review): filter1 is created and configured below but never
    * added to the bin or linked — dead element in this snippet. */
   pipel.filter1 = gst_element_factory_make("capsfilter","filter");
   pipel.decodebin = gst_element_factory_make ("openh264dec","decode");
   pipel.sink = gst_element_factory_make("d3dvideosink","sink");

   g_object_set (G_OBJECT (pipel.sink), "sync",FALSE,NULL);

   //create_uri(url,url_size,ip_address,port);
   g_object_set(GST_OBJECT(pipel.source),"location","rtsp://<IP>:554/live/ch00_0",NULL);

   filtercaps = gst_caps_from_string("application/x-rtp");
   g_object_set (G_OBJECT (pipel.filter1), "caps",filtercaps,NULL);

   gst_caps_unref(filtercaps);

   gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.source
                                          ,pipel.rtppay
                                          ,pipel.parse
                                          ,pipel.decodebin
                                          ,pipel.sink
                                          ,NULL);
   /* BUG(review): rtspsrc has only dynamic ("sometimes") source pads that
    * appear after SDP negotiation, so a static link attempted here always
    * fails — this is the "Failed source to rtppay" the asker sees. The
    * link must be made from a "pad-added" handler on rtspsrc instead. */
   if(!gst_element_link(pipel.source,pipel.rtppay))
        printf("\nFailed source to rtppay\n");
   if(!gst_element_link_many(pipel.parse,pipel.decodebin,pipel.sink,NULL))
        printf("\nFailed to link parse to sink");

   /* NOTE(review): "pad-added" is connected on rtph264depay here, but that
    * element has only static pads, so this callback never fires; it should
    * be connected on pipel.source (rtspsrc). Also, the pipeline is never
    * set to GST_STATE_PLAYING and there is no main loop, so even with
    * correct links nothing would play. */
    g_signal_connect(pipel.rtppay, "pad-added", G_CALLBACK(on_pad_added), pipel.parse);
}

最佳答案

/* "pad-added" handler for rtspsrc: when the RTSP source exposes a new
 * dynamic source pad (after SDP negotiation), print its name and template
 * caps, then link it to the "sink" pad of the rtph264depay element passed
 * through the user-data pointer. */
static void cb_new_rtspsrc_pad(GstElement *element,GstPad*pad,gpointer  data)
{
    gchar *name;
    GstCaps * p_caps;
    gchar * description;
    GstElement *p_rtph264depay;

    name = gst_pad_get_name(pad);
    g_print("A new pad %s was created\n", name);

    /* Dump the pad's template caps for diagnostics. */
    p_caps = gst_pad_get_pad_template_caps (pad);
    description = gst_caps_to_string(p_caps);
    /* FIX: the original call was
     *   printf("%s\n", p_caps, ", ", description, "\n");
     * i.e. the format string consumed the GstCaps pointer as a string
     * (undefined behavior) and ignored `description`. Print the caps
     * description itself. */
    printf("%s\n", description);
    g_free(description);
    /* FIX: in GStreamer 1.0 gst_pad_get_pad_template_caps() returns a
     * reference that must be released — the original leaked it. */
    gst_caps_unref(p_caps);

    p_rtph264depay = GST_ELEMENT(data);

    /* Link the new rtspsrc pad to the depayloader's static sink pad. */
    if(!gst_element_link_pads(element, name, p_rtph264depay, "sink"))
    {
        printf("Failed to link elements 3\n");
    }

    g_free(name);
}

/* ---------- Main --------------- */
int main(int argc, char *argv[])
{
    /* Initialize GStreamer */
    gst_init(&argc,&argv);

    /* Build Pipeline */ 
    pipel.pipeline = gst_pipeline_new("My pipeline");

    creating_pipeline(ip_address,port);

    pipel.source = gst_element_factory_make ("rtspsrc","source");
    g_object_set (G_OBJECT (pipel.source), "latency",2000,NULL);
    pipel.rtppay = gst_element_factory_make( "rtph264depay", "depayl");
    pipel.parse = gst_element_factory_make("h264parse","parse");
    pipel.filter1 = gst_element_factory_make("capsfilter","filter");
    pipel.decodebin = gst_element_factory_make ("openh264dec","decode");
    pipel.sink = gst_element_factory_make("d3dvideosink","sink");

    g_object_set (G_OBJECT (pipel.sink), "sync",FALSE,NULL);

    //create_uri(url,url_size,ip_address,port);
    g_object_set(GST_OBJECT(pipel.source),"location","rtsp://<ip>:554/live/ch00_0",NULL);

    filtercaps = gst_caps_from_string("application/x-rtp");
    g_object_set (G_OBJECT (pipel.filter1), "caps",filtercaps,NULL);

    gst_caps_unref(filtercaps);

    gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.source
            ,pipel.rtppay
            ,NULL);
    // listen for newly created pads
    g_signal_connect(pipel.source, "pad-added", G_CALLBACK(cb_new_rtspsrc_pad),pipel.rtppay);
    gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.parse,NULL);
    if(!gst_element_link(pipel.rtppay,pipel.parse))
        printf("\nNOPE\n");

    gst_bin_add_many (GST_BIN (pipel.pipeline),pipel.decodebin
            ,pipel.sink
            ,NULL);

    if(!gst_element_link_many(pipel.parse,pipel.decodebin,pipel.sink,NULL))
        printf("\nFailed to link parse to sink");

    g_signal_connect(pipel.rtppay, "pad-added", G_CALLBACK(on_pad_added), pipel.parse);
}

现在工作!
  • 使用 cb_new_rtspsrc_pad 动态添加 pad
  • 将解析添加到 bin
  • rtppay 和 parse 之间的链接
  • 在管道(pipeline)的其余部分添加必要的元素以使其工作。
  • 关于gstreamer 1.0 rtspsrc 到 rtph264depay 无法链接,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/32233370/

    相关文章:

    ffmpeg - 将 RTSP 视频保存并重新流式传输为直接 UDP

    c++ - 如何实现一个微型 RTSP 服务器?

    web - 在 Web 浏览器上使用带有 Janus 或 WebRTC 的 GStreamer 的实时视频流

    linux - Gstreamer-sharp,构建 libgstreamersharpglue.so

    python - Gnonlin 中的预定暂停

    c++ - 我用 Gstreamer MSVC 1.16.1 构建 opencv 3.4,现在 imread 和 VideoCapture 不起作用

    c++ - 尝试为 rtsp 流填充 yuv 图像时出现段错误

    video - RTSP 帧抓取会产生拖尾、像素化和损坏的图像

    gstreamer - 通过 UDP 使用 gstreamer 进行网络摄像头流式传输

    html - 浏览器中的 Raspberry Pi RTSP 流