Why doesn't the effect get .current from the ref? - reactjs

I need to read localMediaStream in one effect while it is set in another effect. Can someone explain why it is always null in this context (unless I also set it inside the same effect)? If I do set it in the same effect, I end up with a duplicate getUserMedia call, and as a consequence the camera does not turn off when I call track.stop(). Based on this package.
const peerConnections = useRef({});
const localMediaStream = useRef(null);
const peerMediaElements = useRef({
  [LOCAL_VIDEO]: null,
});

useEffect(() => {
  async function handleNewPeer({peerID, createOffer}) {
    if (peerID in peerConnections.current) {
      return console.warn(`Already connected to peer ${peerID}`);
    }

    peerConnections.current[peerID] = new RTCPeerConnection({
      iceServers: freeice(),
    });

    peerConnections.current[peerID].onicecandidate = event => {
      if (event.candidate) {
        socket.emit(ACTIONS.RELAY_ICE, {
          peerID,
          iceCandidate: event.candidate,
        });
      }
    }

    let tracksNumber = 0;
    peerConnections.current[peerID].ontrack = ({streams: [remoteStream]}) => {
      tracksNumber++

      if (tracksNumber === 2) { // video & audio tracks received
        tracksNumber = 0;
        addNewClient(peerID, () => {
          if (peerMediaElements.current[peerID]) {
            peerMediaElements.current[peerID].srcObject = remoteStream;
          } else {
            // FIX LONG RENDER IN CASE OF MANY CLIENTS
            let settled = false;
            const interval = setInterval(() => {
              if (peerMediaElements.current[peerID]) {
                peerMediaElements.current[peerID].srcObject = remoteStream;
                settled = true;
              }
              if (settled) {
                clearInterval(interval);
              }
            }, 1000);
          }
        });
      }
    }

    /*localMediaStream.current = await navigator.mediaDevices.getUserMedia({
      audio: audio,
      video: video
    })*/

    localMediaStream.current.getTracks().forEach(track => { // localMediaStream null
      peerConnections.current[peerID].addTrack(track, localMediaStream.current);
    });

    if (createOffer) {
      const offer = await peerConnections.current[peerID].createOffer();
      await peerConnections.current[peerID].setLocalDescription(offer);

      socket.emit(ACTIONS.RELAY_SDP, {
        peerID,
        sessionDescription: offer,
      });
    }
  }

  socket.on(ACTIONS.ADD_PEER, handleNewPeer);

  return () => {
    socket.off(ACTIONS.ADD_PEER);
  }
}, []);
// The setup below is exactly as in the original source. It did not work until I added the commented-out getUserMedia workaround above, but once I needed to stop the video stream, a bug appeared: the camera stays on.
useEffect(() => {
  async function startCapture() {
    console.log('start capture');
    localMediaStream.current = await navigator.mediaDevices.getUserMedia({
      audio: audio,
      video: video
    }).catch(console.log);

    addNewClient(LOCAL_VIDEO, () => {
      const localVideoElement = peerMediaElements.current[LOCAL_VIDEO];

      if (localVideoElement) {
        localVideoElement.volume = 0;
        localVideoElement.srcObject = localMediaStream.current;
      }
    });
  }

  startCapture()
    .then((data) => socket.emit(ACTIONS.JOIN, {room: roomID}))
    .catch((e) => console.error(e))
    .finally(() => console.log('finally'));
  console.log(roomID);

  return () => {
    localMediaStream.current.getTracks().forEach(track => track.stop());
    socket.emit(ACTIONS.LEAVE);
  };
}, [roomID]);
Thank you very much.
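For context, one common pattern to avoid reading the ref too early is to keep the pending getUserMedia promise in a ref and await it inside the ADD_PEER handler. A rough sketch (not the package's actual code), reusing the refs, socket and ACTIONS from the question; localMediaStreamPromise is a new, hypothetical ref:

// Hypothetical extra ref: holds the pending capture promise so other effects can await it.
const localMediaStreamPromise = useRef(null);

useEffect(() => {
  localMediaStreamPromise.current = navigator.mediaDevices.getUserMedia({
    audio: true,
    video: true,
  });

  localMediaStreamPromise.current
    .then(stream => {
      localMediaStream.current = stream;
      socket.emit(ACTIONS.JOIN, {room: roomID});
    })
    .catch(e => console.error(e));

  return () => {
    // Stop the camera even if the component unmounts before capture finishes.
    localMediaStreamPromise.current
      .then(stream => stream.getTracks().forEach(track => track.stop()))
      .catch(() => {});
    socket.emit(ACTIONS.LEAVE);
  };
}, [roomID]);

useEffect(() => {
  async function handleNewPeer({peerID, createOffer}) {
    // ...create peerConnections.current[peerID], onicecandidate and ontrack as in the question...

    // Await the capture instead of assuming localMediaStream.current is already set.
    const stream = await localMediaStreamPromise.current;
    stream.getTracks().forEach(track => {
      peerConnections.current[peerID].addTrack(track, stream);
    });

    // ...createOffer / setLocalDescription / RELAY_SDP as in the question...
  }

  socket.on(ACTIONS.ADD_PEER, handleNewPeer);
  return () => {
    socket.off(ACTIONS.ADD_PEER);
  };
}, []);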

Related

local audio is not getting stream on remote side

I am trying to make a web app with audio and video calling using WebRTC.
The problem is that local audio/video works properly in my web app, but the remote side does not receive the audio/video stream. There are no errors in the console. You can join a room, but you can't hear the others' audio or see their video.
Here's the code:
useEffect(() => {
const initRoom = async () => {
socket.current = socketInit();
//Get User Audio
await captureLocalMedia();
socket.current.emit(ACTIONS.JOIN, {roomId, user});
socket.current.on(ACTIONS.ADD_PEER, handleNewPeerConnection);
async function captureLocalMedia() {
localMediaStream.current =
await navigator.mediaDevices.getUserMedia({
video: true,
audio: true,
});
}
async function handleNewPeerConnection({peerId, createOffer, user: newUser}) {
if(peerId in connections.current) {
return console.warn(`You are already joined with ${user.username}`)
}
var configuration = {
offerToReceiveAudio: true
}
connections.current[peerId] = new RTCPeerConnection({
iceServers: [
{
urls: "stun:stun.l.google.com:19302"
},
{
urls: "stun:stun1.l.google.com:19302"
},
{
urls: "stun:stun2.l.google.com:19302"
},
{
urls: "stun:stun3.l.google.com:19302"
},
{
urls: "stun:stun4.l.google.com:19302"
}
],
configuration: configuration
})
connections.current[peerId].ontrack = (event) => {
addNewClients(newUser, () => {
if(audioElements.current[newUser.id]) {
audioElements.current[newUser.id].srcObject = event.streams[0];
} else {
let settled = false;
const interval = setInterval(() => {
if(audioElements.current[newUser.id]) {
const [remoteStream] = event.streams;
audioElements.current[newUser.id].srcObject=remoteStream
settled = true;
}
if (settled) {
clearInterval(interval)
}
}, 600)
}
})
}
localMediaStream.current.getTracks().forEach((track) => {
connections.current[peerId].addTrack(
track,
localMediaStream.current
)
});
if(createOffer) {
const offer = await connections.current[peerId].createOffer()
await connections.current[peerId].setLocalDescription(offer)
socket.current.emit(ACTIONS.RELAY_SDP, {
peerId,
sessionDescription: offer
})
}
}
}
initRoom();
return () => {
localMediaStream.current
.getTracks()
.forEach((track) => track.stop());
socket.current.emit(ACTIONS.LEAVE, { roomId });
for (let peerId in connections.current) {
connections.current[peerId].close();
delete connections.current[peerId];
delete audioElements.current[peerId];
}
socket.current.off(ACTIONS.ADD_PEER);
}
}, [])
This is the socketInit function:
import {io} from 'socket.io-client';

const socketInit = () => {
  const options = {
    'force new connection': true,
    reconnectionAttempts: 'Infinity',
    timeout: 10000,
    transports: ['websocket'],
  };
  return io('http://localhost:5500', options)
};

export default socketInit;
You should check whether the offer's SDP contains information about the media tracks, in particular the a=msid and a=ssrc lines. For example:
v=0
o=- 4748410946812024893 2 IN IP4 127.0.0.1
............
a=sendrecv
a=msid:Eei3sKzfsiJybxa4TYhANjGsFMuWe2lAxadS f798f673-566e-4a8e-9760-8d657d031acf
............
a=rtpmap:126 telephone-event/8000
a=ssrc:3563088629 cname:0j/yv49mmBxgcAbW
a=ssrc:3563088629 msid:Eei3sKzfsiJybxa4TYhANjGsFMuWe2lAxadS f798f673-566e-4a8e-9760-8d657d031acf
a=ssrc:3563088629 mslabel:Eei3sKzfsiJybxa4TYhANjGsFMuWe2lAxadS
a=ssrc:3563088629 label:f798f673-566e-4a8e-9760-8d657d031acf
............
a=max-message-size:262144
If the remote peer is receiving the media track information and it still doesn't work, the problem is probably with playback of the HTMLMediaElement. Try adding the line:
audioElements.current[newUser.id].autoplay = true
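For example, a minimal sketch of both checks, reusing the refs and handlers from the code above; the only additions are the console.log of the SDP and the autoplay/play lines:

if (createOffer) {
  const offer = await connections.current[peerId].createOffer();
  await connections.current[peerId].setLocalDescription(offer);

  // Inspect the SDP before relaying it: it should contain a=msid / a=ssrc lines
  // for the audio and video tracks you added with addTrack.
  console.log(offer.sdp);

  socket.current.emit(ACTIONS.RELAY_SDP, {
    peerId,
    sessionDescription: offer
  });
}

// When attaching the remote stream, make sure the element is allowed to play.
connections.current[peerId].ontrack = (event) => {
  addNewClients(newUser, () => {
    const el = audioElements.current[newUser.id];
    if (el) {
      el.srcObject = event.streams[0];
      el.autoplay = true;
      // el.play().catch(console.warn); // optional: surfaces autoplay-policy errors
    }
  });
};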

Problem with STUN/TURN servers in WEBRTC video app made in MERN stack

I have hosted a peer-to-peer meeting React app on Netlify. I have used PeerJS for the video. Everything is working as expected except the video: for some networks the remote person's video works, and for others it does not. I looked it up and found that it may be a STUN/TURN issue. I then added all the STUN/TURN servers to my code, but the video is still not getting set up in some cases. In some cases it works fine, in others the video does not show up. I am attaching the code for the video and the link to the site.
import React,{useEffect,useState} from 'react';
import {io} from "socket.io-client";
import {useParams} from 'react-router-dom';
import {Grid} from "@material-ui/core";
import Peer from 'peerjs';
var connectionOptions = {
"force new connection" : true,
"reconnectionAttempts": "Infinity",
"timeout" : 10000,
"transports" : ["websocket"]
};
const Videobox = ({isVideoMute,isAudioMute}) => {
var myPeer = new Peer(
{
config: {'iceServers': [
{urls:'stun:stun01.sipphone.com'},
{urls:'stun:stun.ekiga.net'},
{urls:'stun:stun.fwdnet.net'},
{urls:'stun:stun.ideasip.com'},
{urls:'stun:stun.iptel.org'},
{urls:'stun:stun.rixtelecom.se'},
{urls:'stun:stun.schlund.de'},
{urls:'stun:stun.l.google.com:19302'},
{urls:'stun:stun1.l.google.com:19302'},
{urls:'stun:stun2.l.google.com:19302'},
{urls:'stun:stun3.l.google.com:19302'},
{urls:'stun:stun4.l.google.com:19302'},
{urls:'stun:stunserver.org'},
{urls:'stun:stun.softjoys.com'},
{urls:'stun:stun.voiparound.com'},
{urls:'stun:stun.voipbuster.com'},
{urls:'stun:stun.voipstunt.com'},
{urls:'stun:stun.voxgratia.org'},
{urls:'stun:stun.xten.com'},
{
urls: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'webrtc@live.com'
},
{
urls: 'turn:192.158.29.39:3478?transport=udp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
},
{
urls: 'turn:192.158.29.39:3478?transport=tcp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
}
]} /* Sample servers, please use appropriate ones */
}
);
const peers = {}
const [socket, setSocket] = useState()
const {id:videoId} = useParams();
const videoGrid = document.getElementById('video-grid')
useEffect(()=> {
const s=io("https://weconnectbackend.herokuapp.com",connectionOptions);
setSocket(s);
return () => {
s.disconnect();
}
},[])
// let myVideoStream;
const [myVideoStream, setmyVideoStream] = useState()
const muteUnmute = () => {
const enabled = myVideoStream.getAudioTracks()[0].enabled;
if (enabled) {
myVideoStream.getAudioTracks()[0].enabled = false;
//setUnmuteButton();
} else {
//setMuteButton();
myVideoStream.getAudioTracks()[0].enabled = true;
}
}
const playStop = () => {
//console.log('object')
let enabled = myVideoStream.getVideoTracks()[0].enabled;
if (enabled) {
myVideoStream.getVideoTracks()[0].enabled = false;
//setPlayVideo()
} else {
//setStopVideo()
myVideoStream.getVideoTracks()[0].enabled = true;
}
}
useEffect(() => {
if(myVideoStream)
playStop()
}, [isVideoMute])
useEffect(() => {
if(myVideoStream)
muteUnmute()
}, [isAudioMute])
useEffect(() => {
if(socket== null)
return;
myPeer.on('open',id=>{
socket.emit('join-room',videoId,id);
})
const myVideo = document.createElement('video')
myVideo.muted = true
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(stream => {
// myVideoStream = stream;
window.localStream=stream;
setmyVideoStream(stream);
console.log(myVideoStream,"myvideostream");
addVideoStream(myVideo, stream)
myPeer.on('call', call => {
call.answer(stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
})
socket.on('user-connected',userId =>{
connectToNewUser(userId, stream)
})
socket.on('user-disconnected', userId => {
if (peers[userId]) peers[userId].close()
})
})
}, [socket,videoId])
function addVideoStream(video, stream) {
video.srcObject = stream
video.addEventListener('loadedmetadata', () => {
video.play()
})
videoGrid.append(video)
}
function connectToNewUser(userId, stream) {
const call = myPeer.call(userId, stream)
const video = document.createElement('video')
call.on('stream', userVideoStream => {
addVideoStream(video, userVideoStream)
})
call.on('close', () => {
video.remove()
})
peers[userId] = call
}
return (
<div id="video-grid" className="videoStyleFromDiv">
{/* <Video srcObject={srcObject}/> */}
</div>
)
}
export default Videobox
Website Link
The TURN servers you are using have been out of commission for a couple of years, at least the ones taken from https://www.html5rocks.com/en/tutorials/webrtc/infrastructure/.
Copying credentials from random places is not how TURN works; you will need to run your own server.
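If you run your own TURN server (a coturn instance, for example), the PeerJS config would look roughly like this; the hostname and credentials below are placeholders, not working values:

var myPeer = new Peer({
  config: {
    iceServers: [
      {urls: 'stun:stun.l.google.com:19302'},
      {
        // Placeholder values: point this at a TURN server you operate.
        urls: 'turn:turn.example.com:3478',
        username: 'your-username',
        credential: 'your-credential'
      }
    ]
  }
});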

WebRTC react stream multiple tracks issue. Camera + Screen Share

I am working on a new website where you can join room calls, make a voice call with your webcam, and also share your screen.
The problem is that when I add the screen-share track to the stream, the peer connection stops working. I don't know how to solve this. I also tried changing the code from adding the track to adding a new stream, and other variations, but I cannot find a solution. Can anyone help me?
Client:
export default function Meeting(props) {
const userVideo = useRef();
const partnerVideo = useRef();
const screenShare = useRef();
const peerRef = useRef();
const socketRef = useRef();
const otherUser = useRef();
const userStream = useRef();
const userScreen = useRef();
useEffect(() => {
navigator.mediaDevices.getUserMedia({ audio: true, video: true }).then(stream => {
userVideo.current.srcObject = stream;
userStream.current = stream;
socketRef.current = socketIOClient.connect(ENDPOINT);
socketRef.current.emit("join room", props.room);
socketRef.current.on('other user', userID => {
callUser(userID);
otherUser.current = userID;
});
socketRef.current.on("user joined", userID => {
otherUser.current = userID;
});
socketRef.current.on("offer", handleRecieveCall);
socketRef.current.on("answer", handleAnswer);
socketRef.current.on("ice-candidate", handleNewICECandidateMsg);
});
}, []);
function callUser(userID) {
console.log("call user - - -d-s-d-s-d--sd-sdd-sd-ssd-sd-sd--");
peerRef.current = createPeer(userID);
userStream.current.getTracks().forEach(track => peerRef.current.addTrack(track, userStream.current));
}
function createPeer(userID) {
const peer = new RTCPeerConnection({
//sdpSemantics: 'unified-plan',
iceServers: [
{
urls: "stun:stun.stunprotocol.org"
},
{
urls: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'webrtc@live.com'
},
]
});
peer.onicecandidate = handleICECandidateEvent;
peer.ontrack = handleTrackEvent;
peer.onnegotiationneeded = () => {
if (peer.signalingState != "stable") return;
handleNegotiationNeededEvent(userID);
}
return peer;
}
function handleNegotiationNeededEvent(userID) {
console.log("negotiationsad-das-d-as-d-asd--asd-a-sd-a-sd-");
peerRef.current.createOffer().then(offer => {
return peerRef.current.setLocalDescription(offer);
}).then(() => {
const payload = {
target: userID,
type: "video-offer",
caller: socketRef.current.id,
sdp: peerRef.current.localDescription
};
socketRef.current.emit("offer", payload);
}).catch(e => console.log(e));
}
function handleRecieveCall(incoming) {
peerRef.current = createPeer();
const desc = new RTCSessionDescription(incoming.sdp);
peerRef.current.setRemoteDescription(desc).then(() => {
userStream.current.getTracks().forEach(track => peerRef.current.addTrack(track, userStream.current));
}).then(() => {
return peerRef.current.createAnswer();
}).then(answer => {
return peerRef.current.setLocalDescription(answer);
}).then(() => {
const payload = {
target: incoming.caller,
type: "video-offer",
caller: socketRef.current.id,
sdp: peerRef.current.localDescription
}
socketRef.current.emit("answer", payload);
})
}
function handleAnswer(message) {
const desc = new RTCSessionDescription(message.sdp);
peerRef.current.setRemoteDescription(desc).catch(e => console.log(e));
}
function handleICECandidateEvent(e) {
if (e.candidate) {
const payload = {
target: otherUser.current,
candidate: e.candidate,
}
socketRef.current.emit("ice-candidate", payload);
}
}
function handleNewICECandidateMsg(incoming) {
const candidate = new RTCIceCandidate(incoming);
if (peerRef.current && candidate) {
peerRef.current.addIceCandidate(candidate).catch(e => console.log(e));
}
}
function handleTrackEvent(e) {
var stream = e.streams[0];
var tracks = stream.getTracks();
var lun = tracks.length;
console.log(tracks);
if (lun === 2) {
partnerVideo.current.srcObject = stream;
} else if (lun === 1) {
screenShare.current.srcObject = stream;
}
};
function shareScreen() {
navigator.mediaDevices.getDisplayMedia({ cursor: true }).then(stream => {
screenShare.current.srcObject = stream;
userScreen.current = stream;
const screenTrack = stream.getTracks()[0];
callUser(otherUser.current);
peerRef.current.addTrack(screenTrack, stream);
screenTrack.onended = function () {
peerRef.current.removeTrack(screenTrack);
}
})
}
return (
<div>
<video controls style={{ height: 500, width: 500 }} autoPlay ref={userVideo} />
<video controls style={{ height: 500, width: 500 }} autoPlay ref={partnerVideo} />
<video controls style={{ height: 500, width: 500 }} autoPlay ref={screenShare} />
<button onClick={shareScreen}>Share screen</button>
</div>
);
};
In shareScreen you're adding a second track to the existing peer connection with addTrack, which triggers renegotiation (it causes your onnegotiationneeded handler to run again). This is correct so far: it triggers a second offer/answer exchange, which is needed when media tracks are added.
You're just not set up to handle renegotiation correctly, because you've mixed initialization code in with your (re)negotiation code, so a new RTCPeerConnection object is created when you already have one.
Renegotiation is going to happen whenever you add tracks or stop their underlying transceivers, so you don't want initialization code inside your negotiation code. Instead do this:
1. Move the creation of your RTCPeerConnection out of handleRecieveCall. Create it on page load instead.
2. Move the addition of tracks out of handleRecieveCall as well if you can, or at least skip re-adding them if they've already been added.
3. Your ontrack handler is going to fire again with the screen-sharing track, so you need to handle that case. You can differentiate it from the camera track because event.streams[0].id and event.transceiver.mid will be different, or simply because it is the third track.
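A minimal sketch of that last point, assuming a new ref at component level (remoteCameraStreamId, a name introduced here) and the same partnerVideo/screenShare refs as in the question:

const remoteCameraStreamId = useRef(null);

function handleTrackEvent(e) {
  const [stream] = e.streams;

  // The first remote stream we see is the camera (its audio and video arrive on the
  // same stream id); a later track arriving on a different stream id is the screen share.
  if (!remoteCameraStreamId.current || stream.id === remoteCameraStreamId.current) {
    remoteCameraStreamId.current = stream.id;
    partnerVideo.current.srcObject = stream;
  } else {
    screenShare.current.srcObject = stream;
  }
}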

How to add an option to a select box above all mapped options in React?

I want to add an "All" option to my existing select box.
The select box is built from some API data, and I want to add an "All" option above that data set.
This is my code:
const useChemicals = () => {
  const [data, setData]: any = useState([]);

  useEffect(() => {
    const getChemicalsData = async () => {
      try {
        const results = await searchApi.requestChemicalsList();
        if (results.data) {
          let groupCount = 0;
          const chemList: any = [];
          results.data.data.chemicals.map((chemical: any, index: number) => {
            if (chemical.key === '') {
              chemList.push({
                label: chemical.value,
                options: [],
              });
            }
          });
          results.data.data.chemicals.map((chemical: any, index: number) => {
            if (chemical.key === '') {
              if (index > 1) {
                groupCount += 1;
              }
            } else {
              chemList[groupCount].options.push({
                label: chemical.value,
                value: chemical.key,
              });
            }
          });
          setData([...chemList]);
        }
      } catch (e) {}
    };
    getChemicalsData();
  }, []);

  return data && data;
};

export default useChemicals;
How can I add this? Please help me, I am new to React.
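One way, sketched under the assumption that the consuming select component accepts a plain { label, value } entry alongside the grouped ones (the 'all' value is just a placeholder for whatever your filter logic expects), is to prepend the extra entry right before calling setData:

// Inside getChemicalsData, after chemList has been filled:
setData([
  { label: 'All', value: 'all' }, // placeholder value
  ...chemList,
]);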

How to wait for an "Array forEach" function?

I have a little problem with synchronous/asynchronous behaviour in "Array.forEach".
I don't know how to force my code to wait until it has finished.
I tried to use async/await, but my code did not wait for the code inside "async responseDB =>".
This is my class:
...
let responsesDTO = [];
await Array.prototype.forEach.call(await searchResponsesByQuestionAndUserId(questions[cpt].idquestion, idUser), async responseDB => {
  if (responseDB !== undefined) {
    const responseDTO = {
      response_id: 0,
      response_text: "",
      response_type: ""
    }
    const responseEntity = await searchResponseByResponseId(responseDB.response_id);
    responseDTO.response_id = responseDB.response_id;
    responseDTO.response_text = responseEntity.text;
    responseDTO.response_type = responseDB.type;
    responsesDTO.push(responseDTO);
  }
});
questionResponse.responses = responsesDTO;
questionResponses[cpt] = questionResponse;
}
Could you help me please? Thanks in advance.
I had to mock your async functions. However, the relevant part is to use for..of instead of forEach: forEach does not await the async callback you pass it, it fires all the callbacks and returns immediately, so there is nothing for your outer await to wait on. With for..of, each await inside the loop body actually pauses the loop.
async function searchResponsesByQuestionAndUserId() {
  let responsesDB = [];
  for (let i = 0; i < 10; i++) {
    responsesDB.push({
      response_id: parseInt(1000 * Math.random(), 10),
      type: 'whatever ' + i
    });
  }
  return new Promise((res) => {
    window.setTimeout(() => {
      res(responsesDB);
    }, 1500);
  });
}

async function searchResponseByResponseId(response_id) {
  return new Promise((res) => {
    window.setTimeout(() => {
      res({
        text: 'text for response ' + response_id
      });
    }, 300);
  });
}

async function getResponsesDTO() {
  let responsesDTO = [],
      responsesDB = await searchResponsesByQuestionAndUserId();
  for (let responseDB of responsesDB) {
    if (responseDB === undefined) {
      continue;
    }
    let responseDTO = {
          response_id: 0,
          response_text: "",
          response_type: ""
        },
        responseEntity = await searchResponseByResponseId(responseDB.response_id);
    responseDTO.response_id = responseDB.response_id;
    responseDTO.response_text = responseEntity.text;
    responseDTO.response_type = responseDB.type;
    responsesDTO.push(responseDTO);
    console.log({responseDTO});
  }
  return responsesDTO;
}

getResponsesDTO().then(responsesDTO => {
  console.log(responsesDTO);
});
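If the per-response lookups are independent of each other, a variation on the same idea is to map them to promises and await them all at once; a sketch using the same mocked functions as above:

async function getResponsesDTOParallel() {
  const responsesDB = await searchResponsesByQuestionAndUserId();

  // Fire all lookups concurrently and wait for every one of them to finish.
  return Promise.all(
    responsesDB
      .filter(responseDB => responseDB !== undefined)
      .map(async responseDB => {
        const responseEntity = await searchResponseByResponseId(responseDB.response_id);
        return {
          response_id: responseDB.response_id,
          response_text: responseEntity.text,
          response_type: responseDB.type
        };
      })
  );
}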
