Building a Simple MP4 Player with GStreamer (Part 1)
1 Foreword
It has been a winding first year of work. During my master's I did IoT front- and back-end development and touched Bootstrap, MySQL, Vue, Element, .NET Core, and three.js; I was exposed to a lot of things, but only at a shallow level, never in depth. My first job was inverter development, mainly motor-control algorithm simulation and implementation plus low-level configuration and application of TI DSP chips. That pulled me into embedded software, a completely new field for me, and made up for the board-level programming and debugging skills I had lacked at school. Later I changed jobs for personal reasons and left industrial automation behind.
Enough rambling. I recently started working with GStreamer for a job requirement. After two days of reading the documentation it feels very powerful: with its pluggable-element design you can assemble a simple media player quickly. I originally wanted to combine it with Qt to build a simple UI with volume control, seeking, and so on, but my girlfriend was pressing me to spend time with her, so for now this post just gets the basic playback working.
2 Implementation
The code is essentially assembled from the official examples plus a few blog posts found online, so there is nothing original here; it is meant as review material for myself and a reference for other beginners.
1 Define a struct
A single struct makes it easy to pass all the pipeline elements to callbacks.
typedef struct _ST_CUSTOMDATA {
  GstElement *gstpPipeline;
  GstElement *mediaSource;
  GstElement *mediaDemuxer;
  GstElement *audioQueue;
  GstElement *videoQueue;
  GstElement *audioDecoder;
  GstElement *videoDecoder;
  GstElement *audioConvert;
  GstElement *videoConvert;
  GstElement *audioSink;
  GstElement *videoSink;
} ST_CUSTOMDATA;

ST_CUSTOMDATA customData;
Note that the struct must be defined as a global variable, otherwise the program crashes: the ST_CUSTOMDATA pointer received by the pad-added callback turns out to be a dangling pointer. I have not fully worked out why yet.
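If a global variable feels unsatisfying, one alternative I can suggest (my own sketch, not from the original code) is to heap-allocate the struct with g_new0(), so that its lifetime is decoupled from any stack frame:

ST_CUSTOMDATA *data = g_new0 (ST_CUSTOMDATA, 1);   /* zero-initialised heap allocation */
/* ... create the elements and build the pipeline exactly as below ... */
g_signal_connect (data->mediaDemuxer, "pad-added",
                  G_CALLBACK (on_pad_added), data); /* pass the heap pointer as user data */
/* ... run the main loop ... */
g_free (data);  /* free only after the pipeline has been set to GST_STATE_NULL */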
2 Create the bus-message callback
Bus messages are watched using a GLib main loop together with gst_bus_add_watch().
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("Error: %s\n", error->message);
      g_printerr ("Debug info: %s\n", debug ? debug : "none");
      g_error_free (error);
      g_free (debug);  /* free the debug string only after printing it */
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
3 The pad-added callback
static void on_pad_added (GstElement *element, GstPad *pad, ST_CUSTOMDATA *data)
{
  gchar *pad_name = NULL;

  g_object_get (pad, "name", &pad_name, NULL);

  if (g_str_has_prefix (pad_name, "video")) {
    GstPad *v_sinkpad = gst_element_get_static_pad (data->videoQueue, "sink");
    if (gst_pad_link (pad, v_sinkpad) == GST_PAD_LINK_OK)
      g_print ("%s linked to the video branch.\n", pad_name);
    else
      g_printerr ("ERROR: gst_pad_link failed for %s\n", pad_name);
    gst_object_unref (v_sinkpad);
  }

  if (g_str_has_prefix (pad_name, "audio")) {
    GstPad *a_sinkpad = gst_element_get_static_pad (data->audioQueue, "sink");
    if (gst_pad_link (pad, a_sinkpad) == GST_PAD_LINK_OK)
      g_print ("%s linked to the audio branch.\n", pad_name);
    else
      g_printerr ("ERROR: gst_pad_link failed for %s\n", pad_name);
    gst_object_unref (a_sinkpad);
  }

  g_free (pad_name);  /* g_object_get returned a copy of the name */
}
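Matching on the pad name works because qtdemux names its source pads video_0, audio_0, and so on, but a more robust variant (my own sketch, not part of the original code) dispatches on the pad's caps instead:

static void on_pad_added_by_caps (GstElement *element, GstPad *pad,
                                  ST_CUSTOMDATA *data)
{
  /* Prefer the negotiated caps; fall back to a caps query if not set yet. */
  GstCaps *caps = gst_pad_get_current_caps (pad);
  const gchar *type;
  GstElement *target = NULL;

  if (!caps)
    caps = gst_pad_query_caps (pad, NULL);
  type = gst_structure_get_name (gst_caps_get_structure (caps, 0));

  /* Media-type names start with "video/" or "audio/" (e.g. video/x-h264). */
  if (g_str_has_prefix (type, "video/"))
    target = data->videoQueue;
  else if (g_str_has_prefix (type, "audio/"))
    target = data->audioQueue;

  if (target) {
    GstPad *sinkpad = gst_element_get_static_pad (target, "sink");
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
      g_printerr ("ERROR: could not link pad of type %s\n", type);
    gst_object_unref (sinkpad);
  }
  gst_caps_unref (caps);
}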
4 The main function
When creating the demuxer and the decoders, make sure their types match the container and codecs: an MP4 file needs qtdemux, and this example assumes H.264 video (avdec_h264) and AAC audio (faad).
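Before writing any C, the same pipeline can be prototyped on the command line with gst-launch-1.0 (a sketch; video_0/audio_0 are the pad names qtdemux typically creates, and movie1.mp4 is a placeholder file):

gst-launch-1.0 filesrc location=movie1.mp4 ! qtdemux name=demux \
    demux.video_0 ! queue ! avdec_h264 ! videoconvert ! ximagesink \
    demux.audio_0 ! queue ! faad ! audioconvert ! autoaudiosink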
int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  guint bus_watch_id;

  /* initialisation */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  customData.gstpPipeline = gst_pipeline_new ("mp4-player");
  customData.mediaSource  = gst_element_factory_make ("filesrc", "file-source");
  customData.mediaDemuxer = gst_element_factory_make ("qtdemux", "qt-demuxer");

  /* create video elements */
  customData.videoQueue   = gst_element_factory_make ("queue", "video-queue");
  customData.videoDecoder = gst_element_factory_make ("avdec_h264", "h264-decoder");
  customData.videoConvert = gst_element_factory_make ("videoconvert", "video-converter");
  customData.videoSink    = gst_element_factory_make ("ximagesink", "video-output");

  /* create audio elements */
  customData.audioQueue   = gst_element_factory_make ("queue", "audio-queue");
  customData.audioDecoder = gst_element_factory_make ("faad", "au-decoder");
  customData.audioConvert = gst_element_factory_make ("audioconvert", "au-converter");
  customData.audioSink    = gst_element_factory_make ("autoaudiosink", "au-output");

  if (!customData.gstpPipeline || !customData.mediaSource ||
      !customData.mediaDemuxer || !customData.videoQueue ||
      !customData.videoDecoder || !customData.videoConvert ||
      !customData.videoSink || !customData.audioQueue ||
      !customData.audioDecoder || !customData.audioConvert ||
      !customData.audioSink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* set the input filename on the source element
     (MP4_SOURCE is defined at the top of the file; see the full listing below) */
  g_object_set (G_OBJECT (customData.mediaSource), "location", MP4_SOURCE, NULL);

  /* add a bus message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (customData.gstpPipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* add all elements to the pipeline */
  gst_bin_add_many (GST_BIN (customData.gstpPipeline),
      customData.mediaSource, customData.mediaDemuxer,
      customData.videoQueue, customData.videoDecoder,
      customData.videoConvert, customData.videoSink,
      customData.audioQueue, customData.audioDecoder,
      customData.audioConvert, customData.audioSink,
      NULL);

  /* link the statically linkable parts; gst_element_link_many is variadic
     and must be NULL-terminated */
  gst_element_link (customData.mediaSource, customData.mediaDemuxer);
  gst_element_link_many (customData.videoQueue, customData.videoDecoder,
      customData.videoConvert, customData.videoSink, NULL);
  gst_element_link_many (customData.audioQueue, customData.audioDecoder,
      customData.audioConvert, customData.audioSink, NULL);

  /* the demuxer is linked to the two branches dynamically: an MP4 file may
     contain several streams (e.g. audio and video), and qtdemux only creates
     its source pads at run time once it knows the number and nature of the
     streams, so we connect a callback that runs whenever "pad-added" is
     emitted */
  g_signal_connect (customData.mediaDemuxer, "pad-added",
      G_CALLBACK (on_pad_added), &customData);

  /* set the pipeline to "playing" state */
  gst_element_set_state (customData.gstpPipeline, GST_STATE_PLAYING);

  /* iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (customData.gstpPipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (customData.gstpPipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
5 Building
I developed this with Qt Creator, so the GStreamer dependencies need to be added to the .pro file; a pkg-config based variant is sketched after the snippet. (I have also been reading up on Meson lately; it feels easier to pick up than Makefiles or CMake, worth a look if you are interested.)
INCLUDEPATH += /usr/include/glib-2.0
INCLUDEPATH += /usr/lib/x86_64-linux-gnu/glib-2.0/include
INCLUDEPATH += /usr/include/gstreamer-1.0
INCLUDEPATH += /usr/lib/x86_64-linux-gnu/gstreamer-1.0/include
LIBS +=-lglib-2.0
LIBS +=-lgobject-2.0
LIBS +=-lgstreamer-1.0 # <gst/gst.h>
LIBS +=-lgstvideo-1.0 # <gst/video/videooverlay.h>
LIBS +=-L/usr/lib/x86_64-linux-gnu/gstreamer-1.0
LIBS +=-lgstautodetect
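Instead of hard-coding include and library paths, qmake can also pull the flags from pkg-config, which is less fragile across distributions (a sketch, assuming the gstreamer-1.0 .pc files are installed):

CONFIG += link_pkgconfig
PKGCONFIG += glib-2.0 gobject-2.0 gstreamer-1.0 gstreamer-video-1.0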
6 Result
When run, the pipeline opens an ximagesink window and plays the file with sound.
3 Full code
It is quite quick to get started with: two or three hours are enough to put together a simple video player of your own. The full code is attached below for now.
Full project download link
#include "mainwindow.h"
#include <QApplication>
#include <gst/gst.h>
//#define RTSP_SOURCE "rtmp://58.200.131.2:1935/livetv/hunantv"
#define MP4_SOURCE "./movie1.mp4"
typedef struct _ST_CUSTOMDATA {
  GstElement *gstpPipeline;
  GstElement *mediaSource;
  GstElement *mediaDemuxer;
  GstElement *audioQueue;
  GstElement *videoQueue;
  GstElement *audioDecoder;
  GstElement *videoDecoder;
  GstElement *audioConvert;
  GstElement *videoConvert;
  GstElement *audioSink;
  GstElement *videoSink;
} ST_CUSTOMDATA;

ST_CUSTOMDATA customData;
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("Error: %s\n", error->message);
      g_printerr ("Debug info: %s\n", debug ? debug : "none");
      g_error_free (error);
      g_free (debug);  /* free the debug string only after printing it */
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
static void on_pad_added (GstElement *element, GstPad *pad, ST_CUSTOMDATA *data)
{
  gchar *pad_name = NULL;

  g_object_get (pad, "name", &pad_name, NULL);

  if (g_str_has_prefix (pad_name, "video")) {
    GstPad *v_sinkpad = gst_element_get_static_pad (data->videoQueue, "sink");
    if (gst_pad_link (pad, v_sinkpad) == GST_PAD_LINK_OK)
      g_print ("%s linked to the video branch.\n", pad_name);
    else
      g_printerr ("ERROR: gst_pad_link failed for %s\n", pad_name);
    gst_object_unref (v_sinkpad);
  }

  if (g_str_has_prefix (pad_name, "audio")) {
    GstPad *a_sinkpad = gst_element_get_static_pad (data->audioQueue, "sink");
    if (gst_pad_link (pad, a_sinkpad) == GST_PAD_LINK_OK)
      g_print ("%s linked to the audio branch.\n", pad_name);
    else
      g_printerr ("ERROR: gst_pad_link failed for %s\n", pad_name);
    gst_object_unref (a_sinkpad);
  }

  g_free (pad_name);  /* g_object_get returned a copy of the name */
}
int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  guint bus_watch_id;

  /* initialisation */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  customData.gstpPipeline = gst_pipeline_new ("mp4-player");
  customData.mediaSource  = gst_element_factory_make ("filesrc", "file-source");
  customData.mediaDemuxer = gst_element_factory_make ("qtdemux", "qt-demuxer");

  /* create video elements */
  customData.videoQueue   = gst_element_factory_make ("queue", "video-queue");
  customData.videoDecoder = gst_element_factory_make ("avdec_h264", "h264-decoder");
  customData.videoConvert = gst_element_factory_make ("videoconvert", "video-converter");
  customData.videoSink    = gst_element_factory_make ("ximagesink", "video-output");

  /* create audio elements */
  customData.audioQueue   = gst_element_factory_make ("queue", "audio-queue");
  customData.audioDecoder = gst_element_factory_make ("faad", "au-decoder");
  customData.audioConvert = gst_element_factory_make ("audioconvert", "au-converter");
  customData.audioSink    = gst_element_factory_make ("autoaudiosink", "au-output");

  if (!customData.gstpPipeline || !customData.mediaSource ||
      !customData.mediaDemuxer || !customData.videoQueue ||
      !customData.videoDecoder || !customData.videoConvert ||
      !customData.videoSink || !customData.audioQueue ||
      !customData.audioDecoder || !customData.audioConvert ||
      !customData.audioSink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* set the input filename on the source element */
  g_object_set (G_OBJECT (customData.mediaSource), "location", MP4_SOURCE, NULL);

  /* add a bus message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (customData.gstpPipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* add all elements to the pipeline */
  gst_bin_add_many (GST_BIN (customData.gstpPipeline),
      customData.mediaSource, customData.mediaDemuxer,
      customData.videoQueue, customData.videoDecoder,
      customData.videoConvert, customData.videoSink,
      customData.audioQueue, customData.audioDecoder,
      customData.audioConvert, customData.audioSink,
      NULL);

  /* link the statically linkable parts; gst_element_link_many is variadic
     and must be NULL-terminated */
  gst_element_link (customData.mediaSource, customData.mediaDemuxer);
  gst_element_link_many (customData.videoQueue, customData.videoDecoder,
      customData.videoConvert, customData.videoSink, NULL);
  gst_element_link_many (customData.audioQueue, customData.audioDecoder,
      customData.audioConvert, customData.audioSink, NULL);

  /* the demuxer is linked to the two branches dynamically: an MP4 file may
     contain several streams (e.g. audio and video), and qtdemux only creates
     its source pads at run time once it knows the number and nature of the
     streams, so we connect a callback that runs whenever "pad-added" is
     emitted */
  g_signal_connect (customData.mediaDemuxer, "pad-added",
      G_CALLBACK (on_pad_added), &customData);

  /* set the pipeline to "playing" state */
  gst_element_set_state (customData.gstpPipeline, GST_STATE_PLAYING);

  /* iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (customData.gstpPipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (customData.gstpPipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
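For readers not using Qt Creator, the GStreamer part should also build directly with gcc and pkg-config (a sketch: it assumes the code is saved as main.c and that the two Qt includes at the top are removed, since nothing in main() uses them):

gcc main.c -o mp4-player $(pkg-config --cflags --libs gstreamer-1.0)
./mp4-player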