I am currently working with an IP camera and I need to display its video feed in a Qt application. I have already managed to build a GStreamer pipeline that displays the feed correctly and with good latency. Here is the pipeline:
rtspsrc location=rtsp://192.168.144.25:8554/main.264 latency=100 ! queue ! decodebin ! videoconvert ! gtksink name=sink sync=false
I am not sure which sink to use in the application I am trying to build, so for now I am using gtksink, but I am open to other suggestions.
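For reference, a quick way to check which of the candidate sinks (gtksink, glimagesink, xvimagesink, ...) are actually installed on a machine is to query the GStreamer registry. This is only a small sketch, assuming PyGObject with GStreamer 1.0 is available:

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

Gst.init(None)

# Query the registry for each candidate video sink.
for name in ('gtksink', 'glimagesink', 'xvimagesink', 'ximagesink'):
    factory = Gst.ElementFactory.find(name)
    print(name, 'available' if factory else 'not installed')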
I built a very simple Qt application with Qt Designer that contains start and stop buttons plus a widget for the video feed:
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.15.10
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.


from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_Form(object):
    def setupUi(self, Form):
        Form.setObjectName("Form")
        Form.resize(1057, 891)
        self.pushButton = QtWidgets.QPushButton(Form)
        self.pushButton.setGeometry(QtCore.QRect(390, 140, 89, 25))
        self.pushButton.setObjectName("pushButton")
        self.pushButton_2 = QtWidgets.QPushButton(Form)
        self.pushButton_2.setGeometry(QtCore.QRect(560, 140, 89, 25))
        self.pushButton_2.setObjectName("pushButton_2")
        self.status_label = QtWidgets.QLabel(Form)
        self.status_label.setGeometry(QtCore.QRect(470, 60, 111, 17))
        self.status_label.setObjectName("status_label")
        self.video_widget = QtWidgets.QWidget(Form)
        self.video_widget.setGeometry(QtCore.QRect(290, 240, 471, 311))
        self.video_widget.setObjectName("video_widget")

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Form"))
        self.pushButton.setText(_translate("Form", "Start"))
        self.pushButton_2.setText(_translate("Form", "Stop"))
        self.status_label.setText(_translate("Form", "Pipeline Status"))


if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
And here is the Python application itself:
import sys
import gi

gi.require_version('Gst', '1.0')
gi.require_version('Gtk', '3.0')  # Specify the Gtk version
from gi.repository import Gst, Gtk, GLib

from PyQt5.QtGui import QWindow
from PyQt5.QtWidgets import QApplication, QMainWindow, QVBoxLayout, QWidget

from mainwindow_ui import Ui_Form
import platform

# Initialize GStreamer
Gst.init(None)
Gtk.init()


class VideoPlayer(QMainWindow, Ui_Form):
    def __init__(self):
        super(VideoPlayer, self).__init__()
        self.setupUi(self)

        self.pipeline = None
        self.video_window = None  # This will hold the window handle

        self.pushButton.clicked.connect(self.start_pipeline)
        self.pushButton_2.clicked.connect(self.stop_pipeline)

    def start_pipeline(self):
        if not self.pipeline:
            # Create a DrawingArea for gtksink
            drawing_area = Gtk.DrawingArea()
            drawing_area.set_size_request(640, 480)  # Set size if needed
            GLib.timeout_add(500, drawing_area.realize)
            # drawing_area.realize()  # Ensure the widget is realized

            # Create the pipeline with an initial gtksink
            self.pipeline = Gst.parse_launch(
                f'rtspsrc location=rtsp://192.168.144.25:8554/main.264 latency=100 ! '
                f'queue ! decodebin ! videoconvert ! gtksink name=sink sync=false'
            )

            # Get the gtksink element
            sink = self.pipeline.get_by_name("sink")

            # Connect to the 'realize' signal of the embedded widget
            widget = sink.get_property("widget")
            widget.connect("realize", self.on_widget_realized)

            # Start the pipeline
            self.pipeline.set_state(Gst.State.PLAYING)
            self.status_label.setText("Pipeline Running")

    def on_widget_realized(self, widget):
        self.video_window = widget.get_window().get_xid()

        # Find the placeholder widget
        video_placeholder = self.findChild(QWidget, "video_widget")

        # Create a QWindow from the window ID
        video_window = QWindow.fromWinId(self.video_window)

        # Create the window container and embed the QWindow
        container = QWidget.createWindowContainer(video_window, parent=video_placeholder)

        # Add the container to the video placeholder's layout
        layout = QVBoxLayout(video_placeholder)
        layout.addWidget(container)

    def stop_pipeline(self):
        if self.pipeline:
            # Stop the pipeline
            self.pipeline.set_state(Gst.State.NULL)
            self.pipeline = None
            self.status_label.setText("Pipeline Stopped")


if __name__ == "__main__":
    app = QApplication(sys.argv)
    window = VideoPlayer()
    window.show()
    sys.exit(app.exec_())
Now this does display the feed, but I have several problems. First, when I start the application, GStreamer always opens its own GTK display window, and that window also seems to be tied to the embedded one, because closing it closes the embedded view as well. And if I stop the feed and start it again with the buttons, it restarts in its own popup window, as described above, instead of inside the Qt UI window.
I am open to any solution. What I want is a Qt application that displays my video feed without any popup windows and that can be stopped and restarted without issues. I also need to display several video feeds at the same time, all coming from similar sources (the same camera model).
I am sharing the solution I use to render a pipeline inside a Qt interface, which consists of passing the window handle to the sink element (in this case, any element that implements the GstVideoOverlay interface, such as glimagesink, xvimagesink, etc.):
#!/usr/bin/env python3
import sys

from PyQt5.QtWidgets import QApplication, QWidget, QPushButton

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
from gi.repository import GObject, Gst, GstVideo


class GUI(QWidget):
    def __init__(self):
        super().__init__()
        self.pipeline_widget = QWidget(self)
        self.pipeline_widget.resize(500, 500)
        self.push_button = QPushButton(self)
        self.push_button.setText(">||")
        self.push_button.pressed.connect(self.play_pause)
        self.playing = True

    def closeEvent(self, a0) -> None:
        global pipeline
        pipeline.set_state(Gst.State.NULL)
        super().closeEvent(a0)
        sys.exit()

    def play_pause(self):
        global pipeline
        if self.playing:
            pipeline.set_state(Gst.State.PAUSED)
        else:
            pipeline.set_state(Gst.State.PLAYING)
        self.playing = not self.playing


def main(args):
    global pipeline
    _ = QApplication(args)
    w = GUI()

    GObject.threads_init()
    Gst.init(None)

    pipeline_str = 'videotestsrc ! glimagesink name=sink'
    pipeline = Gst.parse_launch(pipeline_str)
    if not pipeline:
        sys.stderr.write('could not create pipeline\n')
        sys.exit(1)

    sink = pipeline.get_by_name('sink')
    win = w.pipeline_widget.winId()  # retrieve the window handle
    sink.set_window_handle(win)      # pass the handle to the sink

    loop = GObject.MainLoop()
    pipeline.set_state(Gst.State.PLAYING)
    w.show()

    try:
        loop.run()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print(e)

    # cleanup
    pipeline.set_state(Gst.State.NULL)
    sys.exit()


if __name__ == '__main__':
    main(sys.argv)
This produces a window that looks like the following:
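Since the question also asks for several simultaneous feeds and a clean stop/restart, below is a minimal, untested sketch of how the same GstVideoOverlay idea could be extended to several pipelines, one per camera, each bound to its own QWidget. The camera URLs, the MultiFeed class, and the widget sizes are placeholders, and it assumes an X11 session where glimagesink can render into a foreign window handle. Instead of setting the handle once up front, it answers each sink's prepare-window-handle message on the bus, which keeps the sink from opening its own window, including after a stop and restart.

#!/usr/bin/env python3
# Hypothetical sketch: one pipeline per camera, each rendered into its own
# QWidget through GstVideoOverlay. Not tested against the real cameras.
import sys

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
from gi.repository import Gst, GstVideo

from PyQt5.QtWidgets import QApplication, QWidget, QHBoxLayout

Gst.init(None)

# Placeholder RTSP URLs -- replace with the real camera addresses.
CAMERA_URLS = [
    'rtsp://192.168.144.25:8554/main.264',
    'rtsp://192.168.144.26:8554/main.264',
]


class MultiFeed(QWidget):
    def __init__(self, urls):
        super().__init__()
        layout = QHBoxLayout(self)
        self.pipelines = []
        for url in urls:
            video_widget = QWidget(self)
            video_widget.setMinimumSize(480, 320)
            layout.addWidget(video_widget)
            handle = int(video_widget.winId())  # native window id of this Qt widget

            # glimagesink implements GstVideoOverlay, so it can draw into a
            # foreign window instead of opening its own.
            pipeline = Gst.parse_launch(
                f'rtspsrc location={url} latency=100 ! queue ! '
                f'decodebin ! videoconvert ! glimagesink name=sink sync=false'
            )

            # The sink posts 'prepare-window-handle' right before it would
            # create its own window; answering it with the Qt handle avoids
            # any popup and also covers later restarts of the pipeline.
            bus = pipeline.get_bus()
            bus.enable_sync_message_emission()
            bus.connect('sync-message::element', self.on_sync_message, handle)

            self.pipelines.append(pipeline)

    def on_sync_message(self, bus, message, handle):
        structure = message.get_structure()
        if structure and structure.get_name() == 'prepare-window-handle':
            GstVideo.VideoOverlay.set_window_handle(message.src, handle)

    def start(self):
        for pipeline in self.pipelines:
            pipeline.set_state(Gst.State.PLAYING)

    def stop(self):
        for pipeline in self.pipelines:
            pipeline.set_state(Gst.State.NULL)


if __name__ == '__main__':
    app = QApplication(sys.argv)
    window = MultiFeed(CAMERA_URLS)
    window.show()
    window.start()
    ret = app.exec_()
    window.stop()
    sys.exit(ret)

Here the Qt event loop drives the GUI; the sync-message callback is delivered from GStreamer's streaming thread, so no separate GLib main loop is needed just to hand over the window handles.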