在官方文档基础教程 1:Hello world! (gstreamer.freedesktop.org) 中,我们看到可以使用playbin来实现播放,
在基础教程 2:GStreamer 概念 中,可以看到element的link来实现播放,即使用api程序来实现类似gst-launch-1.0 videotestsrc ! autovideosink 的播放效果。
那怎么播放mp4文件呢,搜一下,可以看到这样的link方式
gst-launch-1.0 filesrc location=test.mp4 ! decodebin ! autovideosink
仿照基础教程2中的例子,似乎就可以实现了,
如果真这么做了,会发现 link 失败:decodebin 的 src pad 要等到解析出具体的流之后才会动态创建,所以不能在建管道时静态连接,需要在运行时监听 pad 的出现,选择合适的 pad 再进行连接,
参考基础教程 3:动态管道 (gstreamer.freedesktop.org),
在其示例代码的基础上,再添加一个 autovideosink 的 element 并完成连接,即可显示视频。
关键的地方在于对pad-added信号的监听,及相应的回调处理
/* Connect to the pad-added signal */
g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);
知道了这个思路,就可以让AI来实现了,以下是通义生成的修改后的例子
#include <gst/gst.h>
//#include <gst/app/gstappsink.h>
#define FILE_PATH "D:\\2024\\06\\Test\\test.mp4"
/* Bus watch callback (GstBusFunc signature): quits the GMainLoop passed as
 * user data when the pipeline reports end-of-stream or an error.
 * Returns TRUE so the watch stays installed for subsequent messages. */
static gboolean bus_call(GstBus* bus, GstMessage* msg, gpointer data) {
    GMainLoop* loop = (GMainLoop*)data;
    (void)bus; /* required by the GstBusFunc signature, not used here */
    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End-of-stream reached.\n");
        g_main_loop_quit(loop);
        break;
    case GST_MESSAGE_ERROR: {
        gchar* debug = NULL;
        GError* error = NULL;
        gst_message_parse_error(msg, &error, &debug);
        g_printerr("Error received from element %s: %s\n",
                   GST_OBJECT_NAME(msg->src), error->message);
        /* The original freed the debug string without printing it, discarding
         * useful diagnostics; report it when the element provided one. */
        if (debug != NULL) {
            g_printerr("Debugging information: %s\n", debug);
        }
        g_free(debug);
        g_clear_error(&error);
        g_main_loop_quit(loop);
        break;
    }
    default:
        /* Other message types (state changes, tags, ...) are ignored. */
        break;
    }
    return TRUE;
}
/* Forward declaration: decodebin "pad-added" handler, defined below. */
static void pad_added_callback(GstElement* decodebin, GstPad* pad, gpointer user_data);

/* Builds and runs a `filesrc ! decodebin ! autovideosink` pipeline playing
 * FILE_PATH. decodebin exposes its source pads dynamically once the stream
 * is typed, so the decodebin -> videosink link is made at runtime in
 * pad_added_callback(). Returns 0 on success, -1 on setup failure. */
int main(int argc, char* argv[]) {
    GstElement* pipeline;
    GstElement* source;
    GstElement* decodebin;
    GstElement* videosink;
    GstBus* bus;
    GMainLoop* loop;
    GstStateChangeReturn ret;
    int rc = 0;

    /* Initialize GStreamer. */
    gst_init(&argc, &argv);

    pipeline = gst_pipeline_new("playback-pipeline");
    source = gst_element_factory_make("filesrc", "file-source");
    decodebin = gst_element_factory_make("decodebin", "decoder");
    videosink = gst_element_factory_make("autovideosink", "video-sink");

    /* Factory creation fails (returns NULL) if a plugin is missing; the
     * original dereferenced these unchecked. */
    if (!pipeline || !source || !decodebin || !videosink) {
        g_printerr("Not all elements could be created.\n");
        if (pipeline)
            gst_object_unref(pipeline);
        return -1;
    }

    g_object_set(source, "location", FILE_PATH, NULL);

    /* The pipeline (bin) takes ownership of the elements added here. */
    gst_bin_add_many(GST_BIN(pipeline), source, decodebin, videosink, NULL);

    /* filesrc -> decodebin can be linked statically; decodebin -> videosink
     * cannot, because decodebin's src pads do not exist yet. */
    if (gst_element_link(source, decodebin) != TRUE) {
        g_printerr("Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    /* Link the video branch whenever decodebin announces a new pad. */
    g_signal_connect(decodebin, "pad-added", G_CALLBACK(pad_added_callback), videosink);

    /* Watch the bus for EOS/error messages on the main loop. */
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    loop = g_main_loop_new(NULL, FALSE);
    /* bus_call already has the GstBusFunc signature; no G_CALLBACK cast
     * needed (the original cast through GCallback was ill-typed). */
    gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    /* Start playback. */
    ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        rc = -1; /* original returned 0 even on this failure */
        goto cleanup;
    }

    g_print("Press Ctrl+C to stop playback...\n");
    g_main_loop_run(loop);

cleanup:
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(pipeline));
    g_main_loop_unref(loop);
    return rc;
}
/* pad-added回调函数 */
/* decodebin "pad-added" handler: links a newly exposed raw-video pad to the
 * video sink. user_data is the autovideosink element passed at connect time.
 * Non-video pads (e.g. audio) are ignored and remain unlinked. */
static void pad_added_callback(GstElement* decodebin, GstPad* pad, gpointer user_data) {
    GstElement* videosink = (GstElement*)user_data;
    GstPad* sink_pad;
    GstCaps* pad_caps = NULL;
    GstStructure* pad_struct;
    const gchar* pad_type;
    (void)decodebin; /* required by the signal signature, not used here */

    sink_pad = gst_element_get_static_pad(videosink, "sink");

    /* The sink has exactly one pad; once it is linked, further pads (e.g. a
     * second video stream) must be ignored or gst_pad_link would fail. */
    if (gst_pad_is_linked(sink_pad)) {
        g_print("Sink pad already linked; ignoring pad '%s'.\n", GST_PAD_NAME(pad));
        goto done;
    }

    /* gst_pad_get_current_caps() may return NULL if caps are not yet fixed;
     * the original dereferenced that NULL. Fall back to querying the pad. */
    pad_caps = gst_pad_get_current_caps(pad);
    if (pad_caps == NULL) {
        pad_caps = gst_pad_query_caps(pad, NULL);
    }
    pad_struct = gst_caps_get_structure(pad_caps, 0);
    pad_type = gst_structure_get_name(pad_struct);
    g_print("Received new pad '%s' with type '%s'.\n", GST_PAD_NAME(pad), pad_type);

    /* Prefix match (as in the official dynamic-pipelines tutorial) instead of
     * an exact string compare against "video/x-raw". */
    if (g_str_has_prefix(pad_type, "video/x-raw")) {
        if (gst_pad_link(pad, sink_pad) == GST_PAD_LINK_OK) {
            g_print("Linked pad '%s' to sink pad '%s'.\n", GST_PAD_NAME(pad), GST_PAD_NAME(sink_pad));
        }
        else {
            g_print("Link failed between pad '%s' and sink pad '%s'.\n", GST_PAD_NAME(pad), GST_PAD_NAME(sink_pad));
        }
    }

done:
    /* Release the caps and pad references taken above. */
    if (pad_caps != NULL)
        gst_caps_unref(pad_caps);
    gst_object_unref(sink_pad);
}