我想编写一个程序,通过多播或广播 rtsp/rtp 服务器传输 cv2 帧。
我在下面编写了一些示例代码,但是使用此代码时,只有第一个客户端能够正常获取流,之后连接的所有其他客户端都无法获取图像流(已在 ffplay 和 VLC 上测试,URL 为 rtsp://host_url:5000/stream)。
这段代码还有需要改进的地方吗?
我认为第一个客户端会获取所有发出的块......
class SensorFactory(GstRtspServer.RTSPMediaFactory):
    """RTSP media factory that feeds OpenCV frames into an appsrc pipeline.

    Frames are read from the module-level ``frame_list`` (a Manager.list of
    BGR cv2/numpy images) indexed by ``image_type``, resized to 640x360 and
    pushed into an appsrc -> x264enc -> rtph264pay pipeline.
    """

    def __init__(self, image_type):
        super(SensorFactory, self).__init__()
        self.number_frames = 0
        self.fps = 30.0
        # Duration of one frame in GStreamer clock units (nanoseconds).
        self.duration = 1.0 / self.fps * Gst.SECOND
        self.launch_string = 'appsrc name=source is-live=true format=GST_FORMAT_TIME ' \
                             'caps=video/x-raw,format=BGR,width=640,height=360,framerate=30/1 ' \
                             '! videoconvert ! video/x-raw,format=I420 ' \
                             '! x264enc speed-preset=ultrafast tune=zerolatency ' \
                             '! rtph264pay name=pay0 pt=96'
        self.image_type = image_type

    def do_create_element(self, url):
        # Build a fresh pipeline for this media from the launch description.
        return Gst.parse_launch(self.launch_string)

    def on_need_data(self, src, length):
        """Push one resized BGR frame into the appsrc when it requests data.

        ``frame_list`` is a Manager.list() containing the latest cv2 frames.
        """
        frame = cv2.resize(frame_list[self.image_type], (640, 360),
                           interpolation=cv2.INTER_LINEAR)
        data = frame.tobytes()
        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        # Gst buffer timing fields are integer nanoseconds; cast explicitly
        # instead of assigning a Python float.
        buf.duration = int(self.duration)
        timestamp = int(self.number_frames * self.duration)
        buf.pts = buf.dts = timestamp
        buf.offset = timestamp
        self.number_frames += 1
        # BUG FIX: the original discarded the push result. Check the
        # GstFlowReturn so a flushing/closed pipeline is visible instead of
        # frames being dropped silently.
        retval = src.emit('push-buffer', buf)
        if retval != Gst.FlowReturn.OK:
            print('push-buffer returned {}'.format(retval))

    def do_configure(self, rtsp_media):
        """Attach on_need_data to the appsrc of a newly configured media."""
        # Restart the timestamp counter for the new media session so pts
        # starts at zero for a fresh pipeline.
        self.number_frames = 0
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data)
class MulticastServer(GstRtspServer.RTSPServer):
    """RTSP server exposing a shared, multicast-capable /stream mount on port 5000."""

    def __init__(self):
        GstRtspServer.RTSPServer.__init__(self)
        self.set_service("5000")  # RTSP control connection (TCP) port
        self.factory0 = SensorFactory(0)
        # Share one media/pipeline between all clients so every connected
        # client receives the same stream.
        self.factory0.set_shared(True)
        self.factory0.set_eos_shutdown(True)
        # Multicast address setup.
        self.address_pool = GstRtspServer.RTSPAddressPool.new()
        # BUG FIX: the original range ended at 240.0.0.10, which is outside
        # the IPv4 multicast range (224.0.0.0 - 239.255.255.255), and started
        # in the link-local-reserved 224.0.0.0/24 block. An invalid pool makes
        # multicast address allocation fail for clients requesting it.
        self.address_pool.add_range("224.3.0.1", "224.3.0.10", 5000, 5010, 5)
        self.factory0.set_address_pool(self.address_pool)
        self.get_mount_points().add_factory("/stream", self.factory0)
# Initialise GStreamer before creating any Gst/GstRtspServer objects.
Gst.init(None)
server = MulticastServer()
# Attach the server to the default GLib main context (None).
server.attach(None)
# Block forever servicing RTSP clients.
GLib.MainLoop().run()
这可能不是最佳解决方案,但这个简单的方案可能适合您的情况:
import cv2

# Simulate a frame source using the gstreamer backend: a videotestsrc element
# producing BGR frames, the format expected by the opencv appsink.
cap = cv2.VideoCapture('videotestsrc ! video/x-raw,format=BGR,width=640,height=480 ! queue ! appsink drop=1', cv2.CAP_GSTREAMER)
if not cap.isOpened():
    print('Error: failed to open capture')
    exit(-1)

w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = float(cap.get(cv2.CAP_PROP_FPS))
# BUG FIX: some gstreamer captures report 0 fps; a non-positive frame rate
# makes VideoWriter fail to open, so fall back to a sane default.
if fps <= 0:
    fps = 30.0
print('Capture opened, framing %dx%d@%f' % (w, h, fps))

# VideoWriter with gstreamer backend (4CC code 0 = RAW input): converts BGR
# frames from opencv, encodes into H264 and streams as RTPH264 to localhost
# over UDP port 5000.
rtph264 = cv2.VideoWriter('appsrc ! video/x-raw,format=BGR ! queue ! videoconvert ! x264enc key-int-max=30 insert-vui=1 speed-preset=ultrafast tune=zerolatency ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5000', cv2.CAP_GSTREAMER, 0, fps, (w, h))
if not rtph264.isOpened():
    print('Error: failed to open rtph264')
    # BUG FIX: release the capture before exiting on this error path.
    cap.release()
    exit(-2)

# Loop reading frames from the capture and pushing them into the rtph264
# writer; release both ends even if the loop exits on an error.
try:
    while True:
        ret, frame = cap.read()
        if not ret:
            print('Read frame failed')
            break
        rtph264.write(frame)
finally:
    rtph264.release()
    cap.release()
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import GLib, Gst, GstVideo, GstRtspServer

# Relay the RTP/H264 stream received on local UDP port 5000 as an RTSP mount.
Gst.init(None)

loop = GLib.MainLoop()
rtsp_server = GstRtspServer.RTSPServer()

# The factory depayloads and re-payloads the incoming RTP so each RTSP
# session negotiates against a clean pay0 element.
relay_factory = GstRtspServer.RTSPMediaFactory()
relay_factory.set_launch(
    '( udpsrc address=127.0.0.1 port=5000 ! application/x-rtp,encoding-name=H264 '
    '! rtph264depay ! h264parse ! rtph264pay name=pay0 )')
rtsp_server.get_mount_points().add_factory("/test", relay_factory)

rtsp_server.attach(None)
print("stream ready at rtsp://127.0.0.1:8554/test")
loop.run()
然后可以使用以下命令测试该流:
gst-play-1.0 rtsp://127.0.0.1:8554/test