GStreamer: ximagesink not working when embedded in a GTK+ window

Problem description

I am learning how to use GStreamer on Linux. I have worked through the basic tutorials and I think I understand what I did there.

I am now trying to modify the GTK+ integration tutorial (#5) so that it uses a live video pipeline (v4l2src ! videoconvert ! ximagesink) instead of playbin.

When I run it, my GTK+ application window opens and the stream goes to the PLAYING state, but I do not see any video. If I comment out the call to gst_video_overlay_set_window_handle, the ximagesink element opens a separate window of its own, and there I can see the video working as expected.

So I don't think there is anything wrong with the pipeline itself; I just haven't figured out how to get it to render as an overlay inside the GTK+ drawing area widget.
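(For reference, the pipeline is the equivalent of this gst-launch-1.0 line, with /dev/video0 as the capture device, the same device the code below sets on v4l2src:

gst-launch-1.0 v4l2src device=/dev/video0 ! videoconvert ! ximagesink
)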

Here is a stripped-down version of the application I have so far:

#include <string.h>

#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>

#include <gdk/gdk.h>
#include <gdk/gdkx.h>

typedef struct CustomData
{
    GstElement *pipeline;
    GstElement *source;
    GstElement *convert;
    GstElement *sink;

    GstState    state;         // Current state of the pipeline
} CustomData;

static void realize_cb(GtkWidget *widget, CustomData *data)
{
    GdkWindow *window;
    guintptr   window_handle;

    window = gtk_widget_get_window(widget);

    if (!gdk_window_ensure_native(window))
        g_error ("Couldn't create native window needed for GstVideoOverlay!");

    window_handle = GDK_WINDOW_XID(window);

    // Comment out the next line and the app works, but opens a separate window
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(data->sink),
                                        window_handle);
}

static void delete_event_cb(GtkWidget  *widget,
                            GdkEvent   *event,
                            CustomData *data)
{
    gtk_main_quit();
}

static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
    if (data->state < GST_STATE_PAUSED)
    {
        GtkAllocation allocation;

        gtk_widget_get_allocation(widget, &allocation);
        cairo_set_source_rgb(cr, 0, 0, 0);
        cairo_rectangle(cr, 0, 0, allocation.width, allocation.height);
        cairo_fill(cr);
    }

    return FALSE;
}

static void create_ui(CustomData *data)
{
    GtkWidget *main_window;   // The uppermost window, containing all others
    GtkWidget *video_window;  // The drawing area where the video will be shown
    GtkWidget *controls;      // HBox to hold the buttons and slider
    GtkWidget *main_box;      // VBox to hold video window and controls
    GtkWidget *play_button, *pause_button, *stop_button;

    main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    g_signal_connect(G_OBJECT(main_window), "delete-event",
                     G_CALLBACK(delete_event_cb), data);

    video_window = gtk_drawing_area_new();
    g_signal_connect(G_OBJECT(video_window), "realize",
                     G_CALLBACK(realize_cb), data);
    g_signal_connect(G_OBJECT(video_window), "draw",
                     G_CALLBACK(draw_cb), data);

    main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
    gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE,  TRUE,  0);

    gtk_container_add(GTK_CONTAINER(main_window), main_box);
    gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);

    gtk_widget_show_all(main_window);
}

static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GError *err;
    gchar  *debug_info;

    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n",
               GST_OBJECT_NAME(msg->src), err->message);
    g_printerr("Debugging information; %s\n",
               debug_info ? debug_info : "none");
    g_clear_error(&err);
    g_free(debug_info);

    gtk_main_quit();
}

static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GstState old_state, new_state, pending_state;

    gst_message_parse_state_changed(msg, &old_state, &new_state,
                                    &pending_state);
    if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->pipeline))
    {
        data->state = new_state;

        g_print("State set to %s:\n", gst_element_state_get_name(new_state));
    }
}

int main(int argc, char *argv[])
{
    CustomData data = { 0 };
    GstBus    *bus;

    gtk_init(&argc, &argv);
    gst_init(&argc, &argv);

    data.source   = gst_element_factory_make("v4l2src", "source");
    data.convert  = gst_element_factory_make("videoconvert", "convert");
    data.sink     = gst_element_factory_make("ximagesink", "sink");
    data.pipeline = gst_pipeline_new("pipeline");

    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
                     data.sink, NULL);
    gst_element_link_many(data.source, data.convert, data.sink, NULL);

    g_object_set(data.source, "device", "/dev/video0", NULL);

    create_ui(&data);

    bus = gst_element_get_bus(data.pipeline);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(G_OBJECT(bus), "message::error",
                     (GCallback)error_cb, &data);
    g_signal_connect(G_OBJECT(bus), "message::state-changed",
                     (GCallback)state_changed_cb, &data);
    gst_object_unref(bus);

    gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    gtk_main();

    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}

Any help with this would be greatly appreciated.

Tags: linux, gtk, gstreamer

Solution


I got the answer from someone outside this group, so here it is. At the time the X window is realized, it may still be too early to assign the video sink to the overlay, and the specific GStreamer element you need to bind it to may not even be one you created yourself (for example, it may have been created internally by the sink element you did create).

To deal with this, GStreamer sinks that support overlays emit an explicit notification at the right moment, via the bus synchronization mechanism. The application should register a bus sync handler, and when the relevant message arrives (the video-overlay "prepare-window-handle" message), bind the X window to the element that posted it.

See https://gstreamer.freedesktop.org/documentation/video/gstvideooverlay.html?gi-language=c
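In outline, the documented pattern has just two pieces: install a sync handler on the pipeline's bus, and in that handler hand the previously saved X window ID to whichever element posted the prepare-window-handle message, then drop the message. A condensed sketch of that handler (saved_xid is a placeholder for the XID stored in the GTK "realize" callback; the full, working program follows below):

#include <gst/gst.h>
#include <gst/video/videooverlay.h>

static guintptr saved_xid;   // placeholder: set from the GTK "realize" callback

static GstBusSyncReply sync_handler(GstBus *bus, GstMessage *msg, gpointer user_data)
{
    // Ignore everything except prepare-window-handle messages
    if (!gst_is_video_overlay_prepare_window_handle_message(msg))
        return GST_BUS_PASS;

    // The message source is the video sink that needs a window to draw into
    gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(msg)),
                                        saved_xid);

    gst_message_unref(msg);
    return GST_BUS_DROP;     // handled here; don't deliver it to async watchers
}

The handler is registered once, right after obtaining the pipeline's bus, with gst_bus_set_sync_handler(bus, (GstBusSyncHandler)sync_handler, NULL, NULL). The full program below does exactly this, but keeps the window handle in CustomData instead of a static variable.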

Here is the updated code, which works (note the changed realize_cb and the new bus_sync_handler function):

#include <string.h>

#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
#include <gst/video/video.h>

#include <gdk/gdk.h>
#include <gdk/gdkx.h>

typedef struct CustomData
{
    GstElement *pipeline;
    GstElement *source;
    GstElement *convert;
    GstElement *sink;

    GstState    state;         // Current state of the pipeline

    guintptr    video_window_handle;
} CustomData;

static void realize_cb(GtkWidget *widget, CustomData *data)
{
    GdkWindow *window;

    window = gtk_widget_get_window(widget);

    if (!gdk_window_ensure_native(window))
    {
        g_error ("Couldn't create native window needed for GstVideoOverlay!");
    }

    data->video_window_handle = GDK_WINDOW_XID(window);
}

static GstBusSyncReply bus_sync_handler(GstBus     *bus,
                                        GstMessage *message,
                                        CustomData *data)
{
    // Ignore all but prepare-window-handle messages
    if (!gst_is_video_overlay_prepare_window_handle_message(message))
    {
        return GST_BUS_PASS;
    }

    if (data->video_window_handle)
    {
        g_print("About to assign window to overlay\n");

        gst_video_overlay_set_window_handle(
            GST_VIDEO_OVERLAY(GST_MESSAGE_SRC(message)),
            data->video_window_handle);
    }
    else
    {
        g_warning("Should have gotten a video window handle by now\n");
    }

    // The message is fully handled here, so unref it and drop it
    // rather than let it be delivered again to the async watchers
    gst_message_unref(message);
    return GST_BUS_DROP;
}

static void delete_event_cb(GtkWidget  *widget,
                            GdkEvent   *event,
                            CustomData *data)
{
    gtk_main_quit();
}

static gboolean draw_cb(GtkWidget *widget, cairo_t *cr, CustomData *data)
{
    if (data->state < GST_STATE_PAUSED)
    {
        GtkAllocation allocation;

        gtk_widget_get_allocation(widget, &allocation);
        cairo_set_source_rgb(cr, 0, 0, 0);
        cairo_rectangle(cr, 0, 0, allocation.width, allocation.height);
        cairo_fill(cr);
    }

    return FALSE;
}

static void create_ui(CustomData *data)
{
    GtkWidget *main_window;   // The uppermost window, containing all others
    GtkWidget *video_window;  // The drawing area where the video will be shown
    GtkWidget *controls;      // HBox to hold the buttons and slider
    GtkWidget *main_box;      // VBox to hold video window and controls
    GtkWidget *play_button, *pause_button, *stop_button;

    main_window = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    g_signal_connect(G_OBJECT(main_window), "delete-event",
                     G_CALLBACK(delete_event_cb), data);

    video_window = gtk_drawing_area_new();
    g_signal_connect(G_OBJECT(video_window), "realize",
                     G_CALLBACK(realize_cb), data);
    g_signal_connect(G_OBJECT(video_window), "draw",
                     G_CALLBACK(draw_cb), data);

    main_box = gtk_box_new(GTK_ORIENTATION_VERTICAL, 0);
    gtk_box_pack_start(GTK_BOX(main_box), video_window, TRUE,  TRUE,  0);

    gtk_container_add(GTK_CONTAINER(main_window), main_box);
    gtk_window_set_default_size(GTK_WINDOW(main_window), 640, 480);

    gtk_widget_show_all(main_window);
}

static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GError *err;
    gchar  *debug_info;

    gst_message_parse_error(msg, &err, &debug_info);
    g_printerr("Error received from element %s: %s\n",
               GST_OBJECT_NAME(msg->src), err->message);
    g_printerr("Debugging information; %s\n",
               debug_info ? debug_info : "none");
    g_clear_error(&err);
    g_free(debug_info);

    gtk_main_quit();
}

static void state_changed_cb(GstBus *bus, GstMessage *msg, CustomData *data)
{
    GstState old_state, new_state, pending_state;

    gst_message_parse_state_changed(msg, &old_state, &new_state,
                                    &pending_state);
    if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data->pipeline))
    {
        data->state = new_state;

        g_print("State set to %s:\n", gst_element_state_get_name(new_state));
    }
}

int main(int argc, char *argv[])
{
    CustomData data = { 0 };
    GstBus    *bus;

    gtk_init(&argc, &argv);
    gst_init(&argc, &argv);

    data.source   = gst_element_factory_make("v4l2src", "source");
    data.convert  = gst_element_factory_make("videoconvert", "convert");
    data.sink     = gst_element_factory_make("ximagesink", "sink");
    data.pipeline = gst_pipeline_new("pipeline");

    gst_bin_add_many(GST_BIN(data.pipeline), data.source, data.convert,
                     data.sink, NULL);
    gst_element_link_many(data.source, data.convert, data.sink, NULL);

    g_object_set(data.source, "device", "/dev/video0", NULL);

    create_ui(&data);

    bus = gst_element_get_bus(data.pipeline);
    gst_bus_set_sync_handler(bus, (GstBusSyncHandler)bus_sync_handler,
                             &data, NULL);
    gst_bus_add_signal_watch(bus);
    g_signal_connect(G_OBJECT(bus), "message::error",
                     (GCallback)error_cb, &data);
    g_signal_connect(G_OBJECT(bus), "message::state-changed",
                     (GCallback)state_changed_cb, &data);
    gst_object_unref(bus);

    gst_element_set_state(data.pipeline, GST_STATE_PLAYING);

    gtk_main();

    gst_element_set_state(data.pipeline, GST_STATE_NULL);
    gst_object_unref(data.pipeline);
    return 0;
}
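
For reference, a program like this would typically be built with pkg-config, assuming the GTK+ 3 and GStreamer 1.x development packages are installed (the source and output file names here are just examples):

gcc main.c -o gtk-gst-overlay $(pkg-config --cflags --libs gtk+-3.0 gstreamer-1.0 gstreamer-video-1.0)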
