The remote stream from react-native-webrtc shows up black during a video call. It receives audio/video from the remote candidates (the remote stream object is logged below), but the video simply will not display.
{"_id": "28967b2b-d555-4438-a8e8-b415e1831998", "_reactTag": "1658015a-9780-45ad-aa1a-b7d4829a1ee0", "_tracks": [{"_constraints": [Object], "_enabled": true, "_muted": false, "_peerConnectionId": 62, "_readyState": "live", "_settings": [Object], "id": "b32d8b02-4297-4650-b104-1b95abdfaef2", "kind": "audio", "label": "", "remote": true}, {"_constraints": [Object], "_enabled": true, "_muted": true, "_peerConnectionId": 62, "_readyState": "live", "_settings": [Object], "id": "c23b7813-7918-44fe-b9c3-237adb033900", "kind": "video", "label": "", "remote": true}]}
Here is the code for my video call screen:
import { useEffect, useRef, useState } from "react";
import {
Image,
View,
StyleSheet,
TouchableOpacity,
ToastAndroid,
} from "react-native";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import {
mediaDevices,
RTCIceCandidate,
RTCSessionDescription,
RTCView,
} from "react-native-webrtc";
import * as Location from "expo-location";
import { useNetInfo } from "@react-native-community/netinfo";
import InCallManager from "react-native-incall-manager";
import IncomingCallModal from "../../../components/modals/IncomingCallModal";
import LetterImage from "../../../components/LetterImage";
import OngoingCallModal from "../../../components/modals/OngoingCallModal";
import Text from "../../../components/Text";
import useAuth from "../../../auth/useAuth";
import useSocket from "../../../utils/socket";
import useUtils from "../../../utils";
import callsApi from "../../../api/calls";
import { videoImg } from "../../../assets/images";
import colors from "../../../config/colors";
import logger from "../../../utils/logger";
import routes from "../../../navigation/routes";
import { getDeviceIp } from "../../../utils/ip";
function VideoCallScreen({ route, navigation }) {
const { contact, callState, callId, navigatedFrom } = route.params;
const { isConnected, isInternetReachable } = useNetInfo();
const { user } = useAuth();
const { closeAndReinstantiatePeerConnection, peerConnection, stopStreams } =
useUtils();
const {
answerCall,
cancelTimeout,
checkWebRTCServerStatus,
outgoingCall,
socket,
sendCall,
sendICEcandidate,
} = useSocket();
useEffect(() => {
if (navigatedFrom && callId) {
socket.emit("navigated", { calleeId: otherUserId.current });
}
}, [callId, navigatedFrom]);
const otherUserId = useRef(contact?.mobile);
const remoteRTCMessage = useRef();
const [isCalling, setIsCalling] = useState(false);
const [type, setType] = useState(callState ? callState : "VIDEO_CALL");
const [localStream, setLocalStream] = useState(null);
const [remoteStream, setRemoteStream] = useState(null);
const [localMicOn, setlocalMicOn] = useState(true);
const [localWebcamOn, setlocalWebcamOn] = useState(true);
const [mainCallId, setMainCallId] = useState("");
const timeoutID = useRef();
console.log(remoteStream);
const initStream = async () => {
let isFront = false;
mediaDevices.enumerateDevices().then((sourceInfos) => {
let videoSourceId;
for (let i = 0; i < sourceInfos.length; i++) {
const sourceInfo = sourceInfos[i];
if (
sourceInfo.kind == "videoinput" &&
sourceInfo.facing == (isFront ? "user" : "environment")
) {
videoSourceId = sourceInfo.deviceId;
}
}
mediaDevices
.getUserMedia({
audio: true,
video: true,
})
.then((stream) => {
setLocalStream(stream);
stream.getTracks().forEach((track) => {
peerConnection.current.addTrack(track, stream);
});
})
.catch((error) => {
logger.log("Could not get media stream");
logger.log(error);
});
});
};
async function processCall() {
if (isInternetReachable === false && isConnected === false) {
return alert("Check your phone's internet connection and try again.");
}
const status = await checkWebRTCServerStatus();
if (status === false) {
return alert("Call could not connect!");
}
InCallManager.start({ media: "audio", ringback: "_DTMF_" });
// await initStream();
const {
coords: { latitude, longitude },
} = await Location.getCurrentPositionAsync({
accuracy: Location.LocationAccuracy.BestForNavigation,
});
setIsCalling(true);
const res = await callsApi.addCall(
{
status: "outgoing",
callType: "video",
receiver: contact.name,
mobile: contact.mobile,
},
{ latitude, longitude }
);
if (res.ok) {
setMainCallId(res.data.call._id);
const data = {
calleeId: contact.mobile,
callInfo: {
callType: "video",
caller: {
name: user.name,
mobile: user.mobile,
},
callState: "INCOMING_CALL",
callId: res.data.call._id,
},
};
outgoingCall(data);
timeoutID.current = setTimeout(() => {
setIsCalling(false);
InCallManager.stopRingback();
InCallManager.stop();
socket.emit("callTimeout", { calleeId: otherUserId.current });
ToastAndroid.show("Call ended", ToastAndroid.SHORT);
navigation.reset({
index: 0,
routes: [
{ name: "TabNavigator" },
{ name: "AudioCall" },
{ name: "VideoCall" },
],
});
}, 32000);
}
}
const send = async () => {
// 1. Caller runs the `createOffer` method for getting SDP.
const sessionDescription = await peerConnection.current.createOffer();
// 2. Caller sets the local description using `setLocalDescription`.
await peerConnection.current.setLocalDescription(sessionDescription);
sendCall({
calleeId: otherUserId.current,
rtcMessage: sessionDescription,
});
};
const updateCall = async (id, status) => {
const {
coords: { latitude, longitude },
} = await Location.getCurrentPositionAsync();
if (callId) {
await callsApi.updateCall(
{
callStatus: status,
callId: id,
mateId: user.id,
},
{ longitude, latitude }
);
}
};
async function processAccept() {
cancelTimeout(otherUserId.current);
// await initStream();
const {
coords: { latitude, longitude },
} = await Location.getCurrentPositionAsync({
accuracy: Location.LocationAccuracy.BestForNavigation,
});
await callsApi.updateCall(
{
callStatus: "ongoing",
callId,
receiverId: user.id,
},
{ longitude, latitude }
);
// 4. Bob sets the description Alice sent him as the remote description using setRemoteDescription().
// try {
await peerConnection.current.setRemoteDescription(
new RTCSessionDescription(remoteRTCMessage.current)
);
// 5. Bob runs the `createAnswer` method
const sessionDescription = await peerConnection.current.createAnswer();
// 6. Bob sets that as the local description and sends it to Alice
await peerConnection.current.setLocalDescription(sessionDescription);
answerCall({
callerId: otherUserId.current,
rtcMessage: sessionDescription,
});
}
useEffect(() => {
initStream();
socket.on("callTimeout", () => {
setIsCalling(false);
leave();
updateCall(callId, "missed");
setType("VIDEO_CALL");
ToastAndroid.show("Call ended", ToastAndroid.SHORT);
navigation.reset({
index: 0,
routes: [
{ name: "TabNavigator" },
{ name: "AudioCall" },
{ name: "VideoCall" },
],
});
});
socket.on("cancelTimeout", () => {
if (timeoutID.current) {
console.log("Clearing timeoutID", timeoutID.current);
clearTimeout(timeoutID.current);
InCallManager.stopRingback();
}
});
socket.on("callEnded", () => {
streamCleanup();
stopStreams();
closeAndReinstantiatePeerConnection();
setIsCalling(false);
setType("VIDEO_CALL");
ToastAndroid.show("Call ended", ToastAndroid.SHORT);
InCallManager.stop();
InCallManager.stopRingback();
navigation.reset({
index: 0,
routes: [
{ name: "TabNavigator" },
{ name: "AudioCall" },
{ name: "VideoCall" },
],
});
});
socket.on("newCall", (data) => {
remoteRTCMessage.current = data.rtcMessage;
otherUserId.current = data.callerId;
setType("INCOMING_CALL");
});
socket.on("navigated", () => {
send();
});
socket.on("callAnswered", (data) => {
remoteRTCMessage.current = data.rtcMessage;
peerConnection.current.setRemoteDescription(
new RTCSessionDescription(remoteRTCMessage.current)
);
if (timeoutID.current) {
clearTimeout(timeoutID.current);
}
setType("WEBRTC_ROOM");
InCallManager.stopRingback();
logger.log("Remote Stream", remoteStream);
});
socket.on("ICEcandidate", (data) => {
let message = data.rtcMessage;
if (peerConnection.current) {
peerConnection?.current
.addIceCandidate(
new RTCIceCandidate({
candidate: message.candidate,
sdpMid: message.id,
sdpMLineIndex: message.label,
})
)
.then((data) => {
logger.log("SUCCESS");
})
.catch((err) => {
logger.log("Error", err);
});
}
});
socket.on("endCall", () => {
// Receive the signal to end the call from the caller
setIsCalling(false);
setType("VIDEO_CALL");
});
peerConnection.current.onaddstream = (event) => {
if (event.stream) {
logger.log("Setting Remote Stream", event.stream);
setRemoteStream(event.stream);
}
};
peerConnection.current.ontrack = (event) => {
if (event.streams && event.streams.length > 0) {
logger.log("Setting Remote Stream: ontrack", event.streams[0]);
setRemoteStream(event.streams[0]);
}
};
// Setup ice handling
peerConnection.current.onicecandidate = (event) => {
if (event.candidate) {
logger.log("Sending ICE", event.candidate);
sendICEcandidate({
calleeId: otherUserId.current,
rtcMessage: {
label: event.candidate.toJSON().sdpMLineIndex,
id: event.candidate.toJSON().sdpMid,
candidate: event.candidate.toJSON().candidate,
},
});
} else {
logger.log("End of candidates.");
}
};
const cleanup = () => {
if (peerConnection.current) {
closeAndReinstantiatePeerConnection();
}
if (localStream) {
localStream.release();
}
if (remoteStream) {
remoteStream.release();
}
};
return () => {
socket.off("newCall");
socket.off("callAnswered");
socket.off("ICEcandidate");
socket.off("callEnded");
socket.off("callTimeout");
socket.off("navigated");
cleanup();
};
}, []);
async function leave() {
closeAndReinstantiatePeerConnection();
streamCleanup();
stopStreams();
socket.emit("endCall", { calleeId: otherUserId.current });
setIsCalling(false);
setType("VIDEO_CALL");
}
const streamCleanup = () => {
if (localStream) {
localStream.getTracks().forEach((track) => {
track.enabled = false;
track.stop();
});
localStream.release();
}
if (remoteStream) {
remoteStream.getTracks().forEach((track) => {
track.stop();
});
remoteStream.release();
}
setLocalStream(null);
setRemoteStream(null);
};
function switchCamera() {
localStream.getVideoTracks().forEach((track) => {
track._switchCamera();
});
}
function toggleCamera() {
localWebcamOn ? setlocalWebcamOn(false) : setlocalWebcamOn(true);
localStream.getVideoTracks().forEach((track) => {
localWebcamOn ? (track.enabled = false) : (track.enabled = true);
});
}
function toggleMic() {
localMicOn ? setlocalMicOn(false) : setlocalMicOn(true);
localStream.getAudioTracks().forEach((track) => {
localMicOn ? (track.enabled = false) : (track.enabled = true);
});
}
switch (type) {
case "INCOMING_CALL":
return (
<IncomingCallModal
otherUserId={otherUserId.current}
caller={contact}
hangup={() => {
leave();
updateCall(callId, "missed");
setIsCalling(false);
navigation.reset({
index: 0,
routes: [
{ name: "TabNavigator" },
{ name: "AudioCall" },
{ name: "VideoCall" },
],
});
}}
join={() => {
processAccept();
setTimeout(() => {
setType("WEBRTC_ROOM");
}, 3000);
}}
/>
);
case "WEBRTC_ROOM":
return (
<View style={styles.webRTCContainer}>
{/* {localWebcamOn ? ( */}
<>
{remoteStream && (
<View style={styles.callerVideo}>
<RTCView
streamURL={remoteStream.toURL()}
objectFit="cover"
style={{ height: "100%", width: "100%" }}
/>
</View>
)}
{localStream && (
<RTCView
streamURL={localStream.toURL()}
objectFit="cover"
style={styles.video}
/>
)}
</>
{/* ) : (
<View style={styles.cameraBlocked}>
<LetterImage name={user.name} />
<MaterialCommunityIcons name="video-off-outline" size={50} />
<Text>Camera is turned off</Text>
</View>
)} */}
<View
style={{
backgroundColor: colors.black,
flexDirection: "row",
justifyContent: "space-evenly",
paddingVertical: 20,
}}
>
<TouchableOpacity
style={styles.btn}
onPress={async () => {
leave();
InCallManager.stopRingback();
InCallManager.stop();
await callsApi.updateCallStatus({
callId,
callStatus: "finished",
});
navigation.reset({
index: 0,
routes: [
{ name: "TabNavigator" },
{ name: "AudioCall" },
{ name: "VideoCall" },
],
});
}}
>
<MaterialCommunityIcons
color={colors.white}
name="phone"
size={20}
/>
</TouchableOpacity>
<TouchableOpacity
style={[
styles.btn,
{
backgroundColor: colors.grey,
},
]}
onPress={() => toggleMic()}
>
{localMicOn ? (
<MaterialCommunityIcons
name="microphone"
size={20}
color={colors.primary_1100}
/>
) : (
<MaterialCommunityIcons
name="microphone-off"
size={20}
color={colors.primary_1100}
/>
)}
</TouchableOpacity>
<TouchableOpacity
style={[
styles.btn,
{
backgroundColor: colors.grey,
},
]}
onPress={() => toggleCamera()}
>
{localWebcamOn ? (
<MaterialCommunityIcons
name="video-outline"
color={colors.primary_1100}
size={20}
/>
) : (
<MaterialCommunityIcons
name="video-off-outline"
color={colors.primary_1100}
size={20}
/>
)}
</TouchableOpacity>
<TouchableOpacity
style={[
styles.btn,
{
backgroundColor: colors.grey,
},
]}
onPress={() => switchCamera()}
>
<MaterialCommunityIcons
name="camera-switch-outline"
color={colors.primary_1100}
size={20}
/>
</TouchableOpacity>
</View>
</View>
);
// case "WEBRTC_ROOM":
// return (
// <OngoingCallModal
// localMicOn={localMicOn}
// localStream={localStream}
// localWebcamOn={localWebcamOn}
// remoteStream={remoteStream}
// hangup={async () => {
// leave();
// InCallManager.stopRingback();
// InCallManager.stop();
// await callsApi.updateCallStatus({
// callId,
// callStatus: "finished",
// });
// navigation.reset({
// index: 0,
// routes: [
// { name: "TabNavigator" },
// { name: "AudioCall" },
// { name: "VideoCall" },
// ],
// });
// }}
// switchCamera={switchCamera}
// toggleCamera={toggleCamera}
// toggleMic={toggleMic}
// />
// );
case "VIDEO_CALL":
return (
<>
<View style={styles.container}>
<View style={styles.contactInfo}>
<View>
{contact?.image ? (
<Image
source={contact?.image}
resizeMode="cover"
style={styles.userImage}
/>
) : contact.name ? (
<LetterImage name={contact?.name} style={styles.userImage} />
) : (
<MaterialCommunityIcons
color={colors.lightText}
name="account-circle"
size={50}
/>
)}
<View style={styles.callInfo}>
<Text style={styles.callerName}>{contact?.name}</Text>
<Text style={styles.email}>{contact.mobile}</Text>
</View>
</View>
<View style={styles.circle}>
<Image
source={require("../../../assets/images/video.jpg")}
style={styles.chatWindow}
/>
</View>
<View>
{isCalling ? (
<TouchableOpacity
onPress={async () => {
leave();
InCallManager.stopRingback();
InCallManager.stop();
await callsApi.updateCallStatus({
callId: callId || mainCallId,
callStatus: "finished",
});
navigation.reset({
index: 0,
routes: [
{ name: "TabNavigator" },
{ name: "AudioCall" },
{ name: "VideoCall" },
],
});
}}
style={[styles.callBtn, { backgroundColor: colors.danger }]}
>
<MaterialCommunityIcons
name="close"
style={{ marginRight: 8 }}
size={20}
/>
<Text style={styles.btnLabel}>Calling</Text>
</TouchableOpacity>
) : (
<TouchableOpacity
onPress={() => {
processCall();
}}
style={styles.callBtn}
>
<MaterialCommunityIcons
color={colors.white}
name="phone"
style={{ marginRight: 5 }}
size={20}
/>
<Text style={styles.btnLabel}>Call</Text>
</TouchableOpacity>
)}
<Text
style={[styles.bottomText, { fontFamily: "PoppinsMedium" }]}
>
Call using:{" "}
<Text style={[styles.bottomText, { color: "#00000099" }]}>
{user.mobile}
</Text>
</Text>
</View>
</View>
</View>
</>
);
default:
return null;
}
}
const styles = StyleSheet.create({
bContainer: {
alignItems: "center",
bottom: 50,
justifyContent: "center",
},
bottomText: {
fontSize: 11.1495,
lineHeight: 17,
marginVertical: 10,
},
btn: {
alignItems: "center",
backgroundColor: colors.danger,
borderRadius: 35,
height: 55,
justifyContent: "center",
width: 55,
},
btnLabel: {
color: colors.white,
fontFamily: "PoppinsMedium",
fontSize: 14.866,
lineHeight: 22,
},
callBtn: {
alignItems: "center",
backgroundColor: colors.primary,
borderRadius: 50,
flexDirection: "row",
justifyContent: "center",
height: 50,
padding: 10,
width: 150,
},
callInfo: {
alignItems: "center",
},
callerName: {
fontFamily: "PoppinsBold",
fontSize: 16,
letterSpacing: 0.5,
lineHeight: 24,
},
circle: {
borderRadius: 250,
height: 312,
overflow: "hidden",
marginVertical: 20,
width: 307.05,
},
container: {
alignItems: "center",
backgroundColor: "white",
flex: 1,
paddingHorizontal: 15,
},
email: {
fontFamily: "Inter",
fontSize: 12,
letterSpacing: 0.1,
lineHeight: 15,
},
cameraBlocked: {
alignItems: "center",
backgroundColor: colors.black,
flex: 1,
justifyContent: "center",
},
callerVideo: {
height: 200,
right: 20,
position: "absolute",
top: 70,
width: 120,
zIndex: 1,
},
emoji: {
marginHorizontal: 10,
},
emojis: {
alignItems: "center",
flexDirection: "row",
},
icon: {
color: colors.white,
fontSize: 20,
},
iconContain: {
alignItems: "center",
borderColor: colors.white,
borderRadius: 20,
borderWidth: 2,
height: 39.21,
justifyContent: "center",
marginHorizontal: 10,
width: 38.58,
},
option: {
alignItems: "center",
backgroundColor: colors.white,
elevation: 1,
borderRadius: 20.9368,
flexDirection: "row",
marginHorizontal: 10,
padding: 10,
paddingHorizontal: 10,
width: "auto",
},
optionIcon: {
marginHorizontal: 5,
fontSize: 15,
},
options: {
alignItems: "center",
flexDirection: "row",
justifyContent: "space-around",
},
contactInfo: {
alignItems: "center",
gap: 40,
flex: 1,
width: "100%",
},
userImage: {
alignSelf: "center",
borderRadius: 30,
height: 55,
marginBottom: 10,
width: 55,
},
chatWindow: {
alignItems: "center",
justifyContent: "flex-end",
height: "100%",
width: "100%",
},
video: {
height: "100%",
width: "100%",
position: "absolute",
top: 0,
},
webRTCContainer: {
flex: 1,
justifyContent: "flex-end",
},
});
export default VideoCallScreen;
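For completeness, the send() above just does createOffer/setLocalDescription after the tracks were added in initStream(). One variation I am considering (only a sketch, assuming my react-native-webrtc version supports Unified Plan and addTransceiver; I have not confirmed this is the fix) is to explicitly add sendrecv transceivers before creating the offer, so video is always negotiated even if addTrack() has not run yet:

// Sketch of a variant of send(): make sure audio and video are negotiated
// in both directions before the offer is created (hypothetical, untested here).
const sendWithTransceivers = async () => {
  const pc = peerConnection.current;
  // Only add transceivers if none exist yet for this connection.
  if (pc.getTransceivers().length === 0) {
    pc.addTransceiver("audio", { direction: "sendrecv" });
    pc.addTransceiver("video", { direction: "sendrecv" });
  }
  const offer = await pc.createOffer();
  await pc.setLocalDescription(offer);
  sendCall({ calleeId: otherUserId.current, rtcMessage: offer });
};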
And here is the code for my peer connection setup:
import { useRef } from "react";
import { mediaDevices, RTCPeerConnection } from "react-native-webrtc";
export const configuration = {
iceServers: [
{
urls: "stun:stun.l.google.com:19302",
},
{
urls: "stun:stun1.l.google.com:19302",
},
{
urls: "stun:stun2.l.google.com:19302",
},
{
urls: "stun:stun3.l.google.com:19302",
},
{
urls: "stun:stun4.l.google.com:19302",
},
{
urls: "stun:stun.relay.metered.ca:80",
},
{
urls: "turn:standard.relay.metered.ca:80",
username: "df6c0fa43e08de3464664fd4",
credential: "TJdtVTyINKkVaUqb",
},
{
urls: "turn:standard.relay.metered.ca:80?transport=tcp",
username: "df6c0fa43e08de3464664fd4",
credential: "TJdtVTyINKkVaUqb",
},
{
urls: "turn:standard.relay.metered.ca:443",
username: "df6c0fa43e08de3464664fd4",
credential: "TJdtVTyINKkVaUqb",
},
{
urls: "turns:standard.relay.metered.ca:443?transport=tcp",
username: "df6c0fa43e08de3464664fd4",
credential: "TJdtVTyINKkVaUqb",
},
],
};
const useUtils = () => {
const peerConnection = useRef(new RTCPeerConnection(configuration));
const closeAndReinstantiatePeerConnection = () => {
// Close the current RTCPeerConnection
peerConnection.current.close();
// Create a new instance of RTCPeerConnection
const newPeerConnection = new RTCPeerConnection(configuration);
// Update the ref with the new instance
peerConnection.current = newPeerConnection;
};
const stopStreams = () => {
if (peerConnection.current) {
peerConnection.current.getSenders().forEach((sender) => {
if (sender.track) {
sender.track.stop();
}
});
peerConnection.current.getReceivers().forEach((receiver) => {
if (receiver.track) {
receiver.track.stop();
}
});
}
};
// async function getStream() {
// let isFront = true;
// const sourcesInfos = await mediaDevices.enumerateDevices();
// let videoSourceId;
// for (let i = 0; i < sourcesInfos.length; i++) {
// const sourceInfo = sourcesInfos[i];
// if (
// sourceInfo.kind === "videoinput" &&
// sourceInfo.facing == (isFront ? "front" : "environment")
// ) {
// videoSourceId = sourceInfo.deviceId;
// }
// }
// const stream = await mediaDevices.getUserMedia({
// audio: true,
// video: {
// mandatory: {
// minWidth: 500,
// minFrameRate: 30,
// minHeight: 300,
// },
// deviceId: videoSourceId,
// facingMode: isFront ? "user" : "environment",
// frameRate: 30,
// height: 480,
// width: 640,
// optional: videoSourceId ? [{ sourceId: videoSourceId }] : [],
// },
// });
// if (typeof stream != "boolean") return stream;
// return null;
// }
return {
closeAndReinstantiatePeerConnection,
configuration,
// getStream,
peerConnection,
stopStreams,
};
};
export default useUtils;
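One detail about this hook that may or may not matter: peerConnection is created once with useRef, and closeAndReinstantiatePeerConnection() swaps peerConnection.current for a brand new RTCPeerConnection, but the ontrack / onaddstream / onicecandidate handlers are only attached in the screen's mount effect, so the replacement instance has no handlers until the screen remounts. A minimal sketch of how the handlers could be re-attached (the registerHandlers callback is hypothetical and not in my current code):

// Sketch: accept an optional callback that re-registers event handlers
// on the freshly created RTCPeerConnection (hypothetical API, not in use yet).
const closeAndReinstantiatePeerConnection = (registerHandlers) => {
  peerConnection.current.close();
  peerConnection.current = new RTCPeerConnection(configuration);
  if (typeof registerHandlers === "function") {
    // e.g. re-attach ontrack, onaddstream and onicecandidate here
    registerHandlers(peerConnection.current);
  }
};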
Please, I really need help with this. I have been stuck on this issue for weeks and I have limited time left to finish the project.
I also tried moving all of the video-call logic into a single file using an external component, expecting that to work, but it did not.
Same problem here, did you ever solve it?