This is what I am trying to do:
WebRTC endpoint > RTP Endpoint > ffmpeg > RTMP server.
This is what my SDP file looks like:
var cm_offer = "v=0\n" +
"o=- 3641290734 3641290734 IN IP4 127.0.0.1\n" +
"s=nginx\n" +
"c=IN IP4 127.0.0.1\n" +
"t=0 0\n" +
"m=audio 60820 RTP/AVP 0\n" +
"a=rtpmap:0 PCMU/8000\n" +
"a=recvonly\n" +
"m=video 59618 RTP/AVP 101\n" +
"a=rtpmap:101 H264/90000\n" +
"a=recvonly\n";
What happens is that Wireshark can see incoming packets on port 59618, but they are detected as plain UDP packets, not RTP. I am trying to capture the packets with ffmpeg using the following command:
ubuntu@ip-132-31-40-100:~$ ffmpeg -i udp://127.0.0.1:59618 -vcodec copy stream.mp4
ffmpeg version git-2017-01-22-f1214ad Copyright (c) 2000-2017 the FFmpeg developers
built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04.3)
configuration: --extra-libs=-ldl --prefix=/opt/ffmpeg --mandir=/usr/share/man --enable-avresample --disable-debug --enable-nonfree --enable-gpl --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb --disable-decoder=amrnb --disable-decoder=amrwb --enable-libpulse --enable-libfreetype --enable-gnutls --enable-libx264 --enable-libx265 --enable-libfdk-aac --enable-libvorbis --enable-libmp3lame --enable-libopus --enable-libvpx --enable-libspeex --enable-libass --enable-avisynth --enable-libsoxr --enable-libxvid --enable-libvidstab --enable-libwavpack --enable-nvenc
libavutil 55. 44.100 / 55. 44.100
libavcodec 57. 75.100 / 57. 75.100
libavformat 57. 63.100 / 57. 63.100
libavdevice 57. 2.100 / 57. 2.100
libavfilter 6. 69.100 / 6. 69.100
libavresample 3. 2. 0 / 3. 2. 0
libswscale 4. 3.101 / 4. 3.101
libswresample 2. 4.100 / 2. 4.100
libpostproc 54. 2.100 / 54. 2.100
All I get is a blinking cursor, and after quitting (Ctrl+C) the stream.mp4 file is never written to disk.
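As far as I understand, ffmpeg cannot interpret a bare udp:// input as RTP on its own; it normally needs an SDP file describing the incoming streams, passed along with -protocol_whitelist. A rough sketch of what that might look like, reusing the ports and payload types from the offer above (the file name stream.sdp and the rtmp:// URL are only placeholders, not something I have verified against the RtpEndpoint):

stream.sdp:

v=0
o=- 0 0 IN IP4 127.0.0.1
s=Kurento RTP stream
c=IN IP4 127.0.0.1
t=0 0
m=audio 60820 RTP/AVP 0
a=rtpmap:0 PCMU/8000
m=video 59618 RTP/AVP 101
a=rtpmap:101 H264/90000

ffmpeg -protocol_whitelist file,udp,rtp -i stream.sdp -c:v copy -c:a aac -f flv rtmp://localhost/live/stream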
So, can you help me figure out what is going wrong?
Here is the complete code (a modified version of the hello-world tutorial):
/*
* (C) Copyright 2014-2015 Kurento (http://kurento.org/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
function getopts(args, opts)
{
  var result = opts.default || {};

  args.replace(
    new RegExp("([^?=&]+)(=([^&]*))?", "g"),
    function($0, $1, $2, $3) { result[$1] = decodeURI($3); });

  return result;
};

var args = getopts(location.search,
{
  default:
  {
    ws_uri: 'wss://' + location.hostname + ':8433/kurento',
    ice_servers: undefined
  }
});

function setIceCandidateCallbacks(webRtcPeer, webRtcEp, onerror)
{
  webRtcPeer.on('icecandidate', function(candidate) {
    console.log("Local candidate:", candidate);
    candidate = kurentoClient.getComplexType('IceCandidate')(candidate);
    webRtcEp.addIceCandidate(candidate, onerror)
  });

  webRtcEp.on('OnIceCandidate', function(event) {
    var candidate = event.candidate;
    console.log("Remote candidate:", candidate);
    webRtcPeer.addIceCandidate(candidate, onerror);
  });
}

function setIceCandidateCallbacks2(webRtcPeer, rtpEp, onerror)
{
  webRtcPeer.on('icecandidate', function(candidate) {
    console.log("Localr candidate:", candidate);
    candidate = kurentoClient.getComplexType('IceCandidate')(candidate);
    rtpEp.addIceCandidate(candidate, onerror)
  });
}
window.addEventListener('load', function()
{
  console = new Console();

  var webRtcPeer;
  var pipeline;
  var webRtcEpt;

  var videoInput = document.getElementById('videoInput');
  var videoOutput = document.getElementById('videoOutput');

  var startButton = document.getElementById("start");
  var stopButton = document.getElementById("stop");

  startButton.addEventListener("click", function()
  {
    showSpinner(videoInput, videoOutput);

    var options = {
      localVideo: videoInput,
      remoteVideo: videoOutput
    };

    if (args.ice_servers) {
      console.log("Use ICE servers: " + args.ice_servers);
      options.configuration = {
        iceServers : JSON.parse(args.ice_servers)
      };
    } else {
      console.log("Use freeice")
    }

    webRtcPeer = kurentoUtils.WebRtcPeer.WebRtcPeerSendrecv(options, function(error)
    {
      if(error) return onError(error)
      this.generateOffer(onOffer)
    });

    function onOffer(error, sdpOffer)
    {
      if(error) return onError(error)

      kurentoClient(args.ws_uri, function(error, client)
      {
        if(error) return onError(error);

        client.create("MediaPipeline", function(error, _pipeline)
        {
          if(error) return onError(error);

          pipeline = _pipeline;

          pipeline.create("WebRtcEndpoint", function(error, webRtc){
            if(error) return onError(error);

            webRtcEpt = webRtc;

            setIceCandidateCallbacks(webRtcPeer, webRtc, onError)

            webRtc.processOffer(sdpOffer, function(error, sdpAnswer){
              if(error) return onError(error);
              webRtcPeer.processAnswer(sdpAnswer, onError);
            });
            webRtc.gatherCandidates(onError);

            webRtc.connect(webRtc, function(error){
              if(error) return onError(error);
              console.log("Loopback established");
            });
          });

          pipeline.create("RtpEndpoint", function(error, rtp){
            if(error) return onError(error);

            //setIceCandidateCallbacks2(webRtcPeer, rtp, onError)

            var cm_offer = "v=0\n" +
              "o=- 3641290734 3641290734 IN IP4 127.0.0.1\n" +
              "s=nginx\n" +
              "c=IN IP4 127.0.0.1\n" +
              "t=0 0\n" +
              "m=audio 60820 RTP/AVP 0\n" +
              "a=rtpmap:0 PCMU/8000\n" +
              "a=recvonly\n" +
              "m=video 59618 RTP/AVP 101\n" +
              "a=rtpmap:101 H264/90000\n" +
              "a=recvonly\n";

            rtp.processOffer(cm_offer, function(error, cm_sdpAnswer){
              if(error) return onError(error);
              //webRtcPeer.processAnswer(cm_sdpAnswer, onError);
            });
            //rtp.gatherCandidates(onError);

            webRtcEpt.connect(rtp, function(error){
              if(error) return onError(error);
              console.log("RTP endpoint connected to webRTC");
            });
          });
        });
      });
    }
  });

  stopButton.addEventListener("click", stop);

  function stop() {
    if (webRtcPeer) {
      webRtcPeer.dispose();
      webRtcPeer = null;
    }
    if(pipeline){
      pipeline.release();
      pipeline = null;
    }
    hideSpinner(videoInput, videoOutput);
  }

  function onError(error) {
    if(error)
    {
      console.error(error);
      stop();
    }
  }
})
function showSpinner() {
  for (var i = 0; i < arguments.length; i++) {
    arguments[i].poster = 'img/transparent-1px.png';
    arguments[i].style.background = "center transparent url('img/spinner.gif') no-repeat";
  }
}

function hideSpinner() {
  for (var i = 0; i < arguments.length; i++) {
    arguments[i].src = '';
    arguments[i].poster = 'img/webrtc.png';
    arguments[i].style.background = '';
  }
}

/**
 * Lightbox utility (to display media pipeline image in a modal dialog)
 */
$(document).delegate('*[data-toggle="lightbox"]', 'click', function(event) {
  event.preventDefault();
  $(this).ekkoLightbox();
});
When you use WebRTC, the RTP streams are encrypted with DTLS/SRTP, so you cannot see them as RTP in a network trace such as Wireshark.
I am not sure whether ffmpeg can decode them correctly.
You could try using a WebRTC gateway to achieve your goal.
Right-click one of the packets captured by Wireshark, choose the "Decode As..." option, and select RTP. You will see that the packets transmitted by your WebRTC application are SRTP.
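If you prefer the command line, the same decode-as step can be done with tshark's -d option (port 59618 is taken from the offer in the question; adjust it to your setup):

tshark -i lo -f "udp port 59618" -d udp.port==59618,rtp -O rtp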
FWIW, I ran into the same problem when trying to inspect a pcap generated by tcpdump in Wireshark. This is what worked for me:
Analyze > Enabled Protocols... > search for "RTP" > check all the protocols under RTP.
In particular, the option we need is "rtp_udp".
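tshark appears to expose the same switch as --enable-heuristic; assuming "rtp_udp" really is the short name of that heuristic dissector (as the dialog suggests), something along these lines should make the packets show up as RTP (capture.pcap is a placeholder for the tcpdump output):

tshark -r capture.pcap --enable-heuristic rtp_udp -Y rtp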