A playback engine usually has to be integrated with a GUI. When using GStreamer, GStreamer takes care of media playback and control, while the GUI handles user interaction and creates the window used for display. In this example we will use Qt to show how to tell GStreamer to render video into a specific window, and how to use the information reported by GStreamer to update the GUI.
There are two aspects to pay attention to when integrating with a GUI:
Since the display window is usually created by the GUI framework, we need to tell GStreamer which window to use. Because each platform passes window handles in a different way, GStreamer provides an abstract interface (GstVideoOverlay) that hides these differences, so we can simply hand the window ID created by the GUI to GStreamer.
Most GUI frameworks require UI updates to be performed in the main thread, but GStreamer may create several threads internally. All messages produced by GStreamer therefore have to be forwarded to the GUI main thread through the GstBus and the GUI's own communication mechanism, and the main thread then refreshes the interface.
Below we will use Qt as an example to see how to integrate GStreamer with a GUI framework.
qtoverlay.h
#ifndef _QTOVERLAY_
#define _QTOVERLAY_

#include <gst/gst.h>
#include <QWidget>
#include <QPushButton>
#include <QHBoxLayout>
#include <QVBoxLayout>
#include <QSlider>
#include <QTimer>

class PlayerWindow : public QWidget
{
    Q_OBJECT
public:
    PlayerWindow(GstElement *p);
    WId getVideoWId() const;

    static gboolean postGstMessage(GstBus *bus, GstMessage *message, gpointer user_data);

private slots:
    void onPlayClicked();
    void onPauseClicked();
    void onStopClicked();
    void onAlbumAvaiable(const QString &album);
    void onState(GstState st);
    void refreshSlider();
    void onSeek();
    void onEos();

signals:
    void sigAlbum(const QString &album);
    void sigState(GstState st);
    void sigEos();

private:
    GstElement *pipeline;
    QPushButton *playBt;
    QPushButton *pauseBt;
    QPushButton *stopBt;
    QWidget *videoWindow;
    QSlider *slider;
    QHBoxLayout *buttonLayout;
    QVBoxLayout *playerLayout;
    QTimer *timer;
    GstState state;
    gint64 totalDuration;
};

#endif
qtoverlay.cpp
#include <gst/video/videooverlay.h>
#include <QApplication>
#include "qtoverlay.h"

PlayerWindow::PlayerWindow(GstElement *p)
    : pipeline(p)
    , state(GST_STATE_NULL)
    , totalDuration(GST_CLOCK_TIME_NONE)
{
    playBt = new QPushButton("Play");
    pauseBt = new QPushButton("Pause");
    stopBt = new QPushButton("Stop");
    videoWindow = new QWidget();
    slider = new QSlider(Qt::Horizontal);
    timer = new QTimer();

    connect(playBt, SIGNAL(clicked()), this, SLOT(onPlayClicked()));
    connect(pauseBt, SIGNAL(clicked()), this, SLOT(onPauseClicked()));
    connect(stopBt, SIGNAL(clicked()), this, SLOT(onStopClicked()));
    connect(slider, SIGNAL(sliderReleased()), this, SLOT(onSeek()));

    buttonLayout = new QHBoxLayout;
    buttonLayout->addWidget(playBt);
    buttonLayout->addWidget(pauseBt);
    buttonLayout->addWidget(stopBt);
    buttonLayout->addWidget(slider);

    playerLayout = new QVBoxLayout;
    playerLayout->addWidget(videoWindow);
    playerLayout->addLayout(buttonLayout);

    this->setLayout(playerLayout);

    connect(timer, SIGNAL(timeout()), this, SLOT(refreshSlider()));
    connect(this, SIGNAL(sigAlbum(QString)), this, SLOT(onAlbumAvaiable(QString)));
    connect(this, SIGNAL(sigState(GstState)), this, SLOT(onState(GstState)));
    connect(this, SIGNAL(sigEos()), this, SLOT(onEos()));
}

WId PlayerWindow::getVideoWId() const
{
    return videoWindow->winId();
}

void PlayerWindow::onPlayClicked()
{
    GstState st = GST_STATE_NULL;
    gst_element_get_state (pipeline, &st, NULL, GST_CLOCK_TIME_NONE);
    if (st < GST_STATE_PAUSED) {
        // Pipeline stopped, we need to set the overlay again
        GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
        g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);
        WId xwinid = getVideoWId();
        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
}

void PlayerWindow::onPauseClicked()
{
    gst_element_set_state (pipeline, GST_STATE_PAUSED);
}

void PlayerWindow::onStopClicked()
{
    gst_element_set_state (pipeline, GST_STATE_NULL);
}

void PlayerWindow::onAlbumAvaiable(const QString &album)
{
    setWindowTitle(album);
}

void PlayerWindow::onState(GstState st)
{
    if (state != st) {
        state = st;
        if (state == GST_STATE_PLAYING) {
            timer->start(1000);
        }
        if (state < GST_STATE_PAUSED) {
            timer->stop();
        }
    }
}

void PlayerWindow::refreshSlider()
{
    gint64 current = GST_CLOCK_TIME_NONE;
    if (state == GST_STATE_PLAYING) {
        if (!GST_CLOCK_TIME_IS_VALID(totalDuration)) {
            if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &totalDuration)) {
                slider->setRange(0, totalDuration/GST_SECOND);
            }
        }
        if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &current)) {
            g_print("%ld / %ld\n", current/GST_SECOND, totalDuration/GST_SECOND);
            slider->setValue(current/GST_SECOND);
        }
    }
}

void PlayerWindow::onSeek()
{
    gint64 pos = slider->sliderPosition();
    g_print("seek: %ld\n", pos);
    gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
                             GST_SEEK_FLAG_FLUSH, pos * GST_SECOND);
}

void PlayerWindow::onEos()
{
    gst_element_set_state (pipeline, GST_STATE_NULL);
}

gboolean PlayerWindow::postGstMessage(GstBus *bus, GstMessage *message, gpointer user_data)
{
    PlayerWindow *pw = NULL;
    if (user_data) {
        pw = reinterpret_cast<PlayerWindow*>(user_data);
    }
    switch (GST_MESSAGE_TYPE(message)) {
        case GST_MESSAGE_STATE_CHANGED: {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (message, &old_state, &new_state, &pending_state);
            pw->sigState(new_state);
            break;
        }
        case GST_MESSAGE_TAG: {
            GstTagList *tags = NULL;
            gst_message_parse_tag(message, &tags);
            gchar *album = NULL;
            if (gst_tag_list_get_string(tags, GST_TAG_ALBUM, &album)) {
                pw->sigAlbum(album);
                g_free(album);
            }
            gst_tag_list_unref(tags);
            break;
        }
        case GST_MESSAGE_EOS: {
            pw->sigEos();
            break;
        }
        default:
            break;
    }
    return TRUE;
}

int main(int argc, char *argv[])
{
    gst_init (&argc, &argv);
    QApplication app(argc, argv);
    app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit()));

    // prepare the pipeline
    GstElement *pipeline = gst_parse_launch ("playbin uri=file:///home/john/video/sintel_trailer-480p.webm", NULL);

    // prepare the ui
    PlayerWindow *window = new PlayerWindow(pipeline);
    window->resize(900, 600);
    window->show();

    // set window id to gstreamer
    GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
    WId xwinid = window->getVideoWId();
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
    g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);

    // connect to interesting signals
    GstBus *bus = gst_element_get_bus(pipeline);
    gst_bus_add_watch(bus, &PlayerWindow::postGstMessage, window);
    gst_object_unref(bus);

    // run the pipeline
    GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
    if (sret == GST_STATE_CHANGE_FAILURE) {
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (pipeline);
        // Exit application
        QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
    }

    int ret = app.exec();

    window->hide();
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return ret;
}
qtoverlay.pro
QT += core gui widgets

TARGET = qtoverlay

INCLUDEPATH += /usr/include/glib-2.0
INCLUDEPATH += /usr/lib/x86_64-linux-gnu/glib-2.0/include
INCLUDEPATH += /usr/include/gstreamer-1.0
INCLUDEPATH += /usr/lib/x86_64-linux-gnu/gstreamer-1.0/include

LIBS += -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0 -lgstvideo-1.0

SOURCES += qtoverlay.cpp
HEADERS += qtoverlay.h
Save the content above into the corresponding files, then run the following commands to build the executable. If the header files or libraries cannot be found, adjust the paths in qtoverlay.pro to match your installation.
qmake -o Makefile qtoverlay.pro
make
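If you prefer not to hard-code the include and library paths, qmake can also query them through pkg-config. A minimal sketch of the relevant qtoverlay.pro lines, assuming the GStreamer development packages on your system ship pkg-config files named gstreamer-1.0 and gstreamer-video-1.0:

# let pkg-config supply the GStreamer include paths and libraries
CONFIG    += link_pkgconfig
PKGCONFIG += gstreamer-1.0 gstreamer-video-1.0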
// prepare the pipeline
GstElement *pipeline = gst_parse_launch ("playbin uri=file:///home/jleng/video/sintel_trailer-480p.webm", NULL);

// prepare the ui
PlayerWindow *window = new PlayerWindow(pipeline);
window->resize(900, 600);
window->show();
In main() we initialize GStreamer, create the Qt application object, construct the pipeline and then the GUI window object. The PlayerWindow constructor creates the buttons and the video window, and also sets up the timer used to periodically refresh the progress bar.
// set window id to gstreamer
GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
WId xwinid = window->getVideoWId();
gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);
...
gst_bus_add_watch(bus, &PlayerWindow::postGstMessage, window);
...
GstStateChangeReturn sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
...
int ret = app.exec();
...
Next we create a separate ximagesink for video rendering and pass the ID of the video window created by Qt to GStreamer, so GStreamer knows which window to render into. We then use g_object_set() to install this custom sink on playbin through its "video-sink" property.
We also install a message handler for GStreamer; every message is forwarded through the postGstMessage function. To be able to call back into the GUI object later, we pass the pointer to the GUI window as user_data and cast it back to the GUI object inside postGstMessage.
We then set the pipeline state to PLAYING to start playback.
Finally we enter the GUI framework's event loop; exec() keeps running until the window is closed.
Since GStreamer's GstBus relies on the GLib main loop and its event handling by default, the default GLib MainLoop must be running in some thread. In this example Qt on Linux automatically uses the GLib main loop, so no extra work is needed.
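The GStreamer toolkit-integration tutorial (linked at the end of this article) also shows a slightly different pattern: instead of creating the sink ourselves, we can watch for the prepare-window-handle message in a synchronous bus handler and set the handle on whatever sink playbin created. A rough sketch of that approach, reusing the PlayerWindow class from this example (not part of the original code):

// Sketch: set the window handle from a sync bus handler, so it is applied in time
// even when playbin creates the video sink internally. In a real application the
// window ID should be fetched in the GUI thread beforehand and cached, since this
// handler runs in a GStreamer streaming thread.
static GstBusSyncReply on_sync_message (GstBus *bus, GstMessage *message, gpointer user_data)
{
    if (!gst_is_video_overlay_prepare_window_handle_message (message))
        return GST_BUS_PASS;

    PlayerWindow *window = reinterpret_cast<PlayerWindow*>(user_data);
    gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
                                         window->getVideoWId());
    gst_message_unref (message);
    return GST_BUS_DROP;
}

// installed once in main(), before setting the pipeline to PLAYING:
// gst_bus_set_sync_handler (bus, on_sync_message, window, NULL);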
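If the GUI framework does not drive a GLib main loop (for example a Qt build without the GLib event dispatcher), the watch installed with gst_bus_add_watch() would never fire. One possible workaround, sketched below under that assumption, is to poll the bus from a QTimer slot running in the GUI thread instead of installing a watch. The pollGstBus() slot shown here is hypothetical and not part of the original PlayerWindow class:

// Hypothetical slot, driven by a QTimer: drain pending bus messages in the GUI thread.
void PlayerWindow::pollGstBus()
{
    GstBus *bus = gst_element_get_bus(pipeline);
    GstMessage *msg;
    while ((msg = gst_bus_pop(bus)) != NULL) {
        postGstMessage(bus, msg, this);   // reuse the existing message handler
        gst_message_unref(msg);
    }
    gst_object_unref(bus);
}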
gboolean PlayerWindow::postGstMessage(GstBus *bus, GstMessage *message, gpointer user_data)
{
    PlayerWindow *pw = NULL;
    if (user_data) {
        pw = reinterpret_cast<PlayerWindow*>(user_data);
    }
    switch (GST_MESSAGE_TYPE(message)) {
        case GST_MESSAGE_STATE_CHANGED: {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (message, &old_state, &new_state, &pending_state);
            pw->sigState(new_state);
            break;
        }
        case GST_MESSAGE_TAG: {
            GstTagList *tags = NULL;
            gst_message_parse_tag(message, &tags);
            gchar *album = NULL;
            if (gst_tag_list_get_string(tags, GST_TAG_ALBUM, &album)) {
                pw->sigAlbum(album);
                g_free(album);
            }
            gst_tag_list_unref(tags);
            break;
        }
        case GST_MESSAGE_EOS: {
            pw->sigEos();
            break;
        }
        default:
            break;
    }
    return TRUE;
}
After casting user_data back to the GUI object, we handle the message according to its type. We do not update the GUI directly in postGstMessage, because the thread dispatching bus messages may not be the GUI main thread, and touching the GUI from another thread could fail or have no effect. Instead we use Qt's signal-slot mechanism and update the GUI in the corresponding slots. Only three message types are handled here: STATE_CHANGED (state changes), TAG (media metadata and codec information) and EOS (end of stream); the full list of messages supported by GStreamer can be found in the official GstMessage documentation.
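Note that Qt's default AutoConnection only becomes a queued (cross-thread) connection when sender and receiver live in different threads, and queued delivery of a custom type such as GstState requires the type to be registered with Qt's meta-type system. A sketch of what that could look like; none of this is in the original example, and the placement hints are given in the comments:

Q_DECLARE_METATYPE(GstState)   // at file scope, e.g. in qtoverlay.h after the includes

// before the connect() calls, e.g. at the top of the PlayerWindow constructor:
qRegisterMetaType<GstState>();
connect(this, SIGNAL(sigState(GstState)), this, SLOT(onState(GstState)),
        Qt::QueuedConnection);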
void PlayerWindow::onPlayClicked()
{
    GstState st = GST_STATE_NULL;
    gst_element_get_state (pipeline, &st, NULL, GST_CLOCK_TIME_NONE);
    if (st < GST_STATE_PAUSED) {
        // Pipeline stopped, we need to set the overlay again
        GstElement *vsink = gst_element_factory_make ("ximagesink", "vsink");
        g_object_set(GST_OBJECT(pipeline), "video-sink", vsink, NULL);
        WId xwinid = getVideoWId();
        gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (vsink), xwinid);
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
When the Play button is clicked, onPlayClicked() is called and we directly use the GStreamer API to change the pipeline state. When playback finishes or Stop is clicked, GStreamer releases all of its resources as the state drops to NULL, so here we have to set playbin's video-sink again and re-specify the video output window.
Pause and Stop are handled in the same way: we simply call gst_element_set_state() to put the pipeline into the corresponding state.
void PlayerWindow::refreshSlider()
{
    gint64 current = GST_CLOCK_TIME_NONE;
    if (state == GST_STATE_PLAYING) {
        if (!GST_CLOCK_TIME_IS_VALID(totalDuration)) {
            if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &totalDuration)) {
                slider->setRange(0, totalDuration/GST_SECOND);
            }
        }
        if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &current)) {
            g_print("%ld / %ld\n", current/GST_SECOND, totalDuration/GST_SECOND);
            slider->setValue(current/GST_SECOND);
        }
    }
}

void PlayerWindow::onSeek()
{
    gint64 pos = slider->sliderPosition();
    g_print("seek: %ld\n", pos);
    gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
                             GST_SEEK_FLAG_FLUSH, pos * GST_SECOND);
}
The timer created in the constructor refreshes the progress bar once per second. When refreshSlider() is called, we use gst_element_query_duration() and gst_element_query_position() to obtain the total duration and the current position, and update the slider accordingly. GStreamer reports time in nanoseconds, so we divide by GST_SECOND to convert it to seconds for display.
We also handle the user's seek operation: when the slider is released at some position, we read that position and call gst_element_seek_simple() to jump there. We do not need to care which thread the GStreamer call is made from; GStreamer handles that internally.
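For logging, GStreamer also provides the GST_TIME_FORMAT / GST_TIME_ARGS helpers, which print a nanosecond timestamp as hours:minutes:seconds.nanoseconds without manual conversion. A small sketch that could replace the g_print call above:

g_print ("position: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\n",
         GST_TIME_ARGS (current), GST_TIME_ARGS (totalDuration));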
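gst_element_seek_simple() also accepts additional seek flags. For example, combining GST_SEEK_FLAG_FLUSH with GST_SEEK_FLAG_KEY_UNIT lets the pipeline snap to the nearest keyframe, which usually makes seeking faster at the cost of accuracy. A possible variation of the call above:

// seek to the keyframe closest to the requested position
gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
    (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
    pos * GST_SECOND);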
In this article we have learned:
How to use the GstVideoOverlay interface to make GStreamer render video into a window created by the GUI framework.
How to forward messages from the GstBus to the GUI main thread and update the interface there.

References:
https://gstreamer.freedesktop.org/documentation/video/gstvideooverlay.html?gi-language=c
https://gstreamer.freedesktop.org/documentation/tutorials/basic/toolkit-integration.html?gi-language=c
https://doc.qt.io/qt-5/qmake-manual.html