I am working on WebRTC.
I have declared a variable localStream like
var localStream;
outside the component. localStream contains the MediaStream at first, but by the time my callback runs after the call to the Signaling Server, localStream is empty. I want to store the MediaStream in state so that it is still available after the Signaling Server calls back.
On page load -> localStream contains the stream.
From the client I place a call, which goes to the Signaling Server.
The Signaling Server calls back the client, and here localStream is empty.
I tried useState() and useState(undefined); neither works. It is still null.
import ....
const hubUrl = 'https://f1d3e599fe6c.ngrok.io/ConnectionHub';
const conn =
new signalR.HubConnectionBuilder()
.withUrl(hubUrl, signalR.HttpTransportType.WebSockets)
.configureLogging(signalR.LogLevel.Debug)
.withAutomaticReconnect()
.build();
var localStream;
var peerConnectionConfig = { iceServers: [{ url: 'stun:stun.l.google.com:19302' }] };
const webrtcConstraints = { audio: true, video: true };
const Chat = (props) => {
React.useEffect(() => {
...
}
receivedSdpSignal = (connection, partnerClientId, sdp) => {
console.log('WebRTC: processing sdp signal');
console.log('sdp', sdp);
connection.setRemoteDescription(new RTCSessionDescription(sdp)).then(() => {
console.log('WebRTC: set Remote Description');
if (connection.remoteDescription.type == "offer") {
console.log('WebRTC: remote Description type offer');
console.log('localStream', localStream);
connection.addStream(localStream);
console.log('WebRTC: added stream');
connection.createAnswer().then((desc) => {
console.log('WebRTC: create Answer...');
console.log('WebRTC: Description...');
console.log(desc);
connection.setLocalDescription(desc).then(() => {
console.log('WebRTC: set Local Description...');
console.log(connection.localDescription);
this.sendHubSignal(JSON.stringify({ "sdp": connection.localDescription }), partnerClientId);
}).catch(err => console.log("WebRTC: Error while setting local description", err));
}, err => console.log("WebRTC: Error while creating the answer", err));
} else if (connection.remoteDescription.type == "answer") {
console.log('WebRTC: remote Description type answer');
}
}).catch(err => console.log("WebRTC: Error while setting remote description", err));
}
const initializeUserMedia = () => {
console.log('WebRTC: called initializeUserMedia: ');
mediaDevices.getUserMedia(webrtcConstraints).then((stream) => {
console.log("WebRTC: got media stream");
localStream = stream;
let audioTracks = localStream.getAudioTracks();
if (audioTracks.length > 0) {
console.log(`Using Audio device: ${audioTracks[0].label}`);
}
}).catch(err => console.log("Error getting user media stream.", err));
}
This function is outside useEffect(). localStream is null here.
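A pattern that usually keeps the stream available inside the signaling callbacks is to hold it in a ref instead of a module-level variable, since a ref survives re-renders. A minimal sketch, where localStreamRef is my own name and not from the code above:
// Minimal sketch: store the MediaStream in a ref so it survives re-renders
// and can be read later from receivedSdpSignal. Uses the same webrtcConstraints
// as above; localStreamRef is a hypothetical name.
const Chat = (props) => {
    const localStreamRef = React.useRef(null);

    React.useEffect(() => {
        navigator.mediaDevices.getUserMedia(webrtcConstraints)
            .then((stream) => {
                localStreamRef.current = stream; // persists across renders
            })
            .catch(err => console.log("Error getting user media stream.", err));
    }, []);

    const receivedSdpSignal = (connection, partnerClientId, sdp) => {
        // ...same flow as above, but read the stream from the ref:
        connection.addStream(localStreamRef.current);
    };
    // ...
};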
Related
I am implementing audio/video calling with SIP.js and an Asterisk server in React. I was successful in creating the WebRTC audio/video call, but I am facing an issue with storing the Invitation or Session object from SIP.js, because the object contains circular references and can't be stringified for storage.
Assume someone has started a call and the other end got the incoming-call notification; if the page is refreshed or reloaded at that point, I am unable to recover the call session to take any action (answer/decline).
/**
* The following code is inside useEffect and the dependencies are handled properly.
* To keep it simple and short I have only copied the required parts. */
const simpleUserDelegate = {
onCallAnswered: (session) => {
console.log(` Call answered`);
if (simpleUser) {
let remoteVideoTrack = simpleUser.getRemoteVideoTrack(session);
if (remoteVideoTrack) {
} else {
setIsAudioCall(true);
}
}
setIsCallAnswered(true);
setIsCallRecieved(false);
localStorage.setItem('isCallRecieved',null);
localStorage.setItem('callerName',null);
localStorage.setItem('callerImage',null);
setIsCallling(false);
},
onCallCreated: (session) => {
setCallSession(session);
console.log(session,` Call created`);
//console.log('session====>',JSON.stringify(session))
// localStorage.setItem('callerUserAgent',JSON.stringify(session._userAgent));
setIsCallling(true);
localStorage.getItem('callerUserAgent')
},
onCallReceived: (invitation) => {
console.log('invitation',invitation);
console.log('invitationSession',invitation.session);
setCallerActiveRoom(invitation._userAgent.options.displayRoomId);
setCallerName(invitation._userAgent.options.displayName);
setCallerImage(invitation._userAgent.options.displayImage);
localStorage.setItem('callerUserAgent',JSON.stringify(invitation.request));
console.log(` Call received`);
// dispatch(setActiveRoomId(invitation._userAgent.options.displayRoomId));
setIsCallRecieved(true);
localStorage.setItem('isCallRecieved',true);
localStorage.setItem('callerName',invitation._userAgent.options.displayName);
localStorage.setItem('callerImage',invitation._userAgent.options.displayImage);
},
onCallHangup: () => {
console.log(` Call hangup`);
setIsCallling(false);
setIsCallRecieved(false);
localStorage.setItem('isCallRecieved',null);
localStorage.setItem('callerName',null);
localStorage.setItem('callerImage',null);
setIsCallAnswered(false);
},
onCallHold: () => {
console.log(` Call hold`);
},
onRegistered: () => {
//console.log('session',session);
console.log(` Call registered`);
},
onUnregistered: () => {
console.log(` Call unregistered`);
},
onServerConnect: () => {
console.log(` server connect`);
},
onServerDisconnect: () => {
console.log(` server dis connect`);
}
};
let simpleUserOptions = {
// traceSip: false,
// logBuiltinEnabled: false,
delegate: simpleUserDelegate,
media: {
constraints: {
audio: true,
video: true
},
local: {
video: document.getElementById('localMedia')
},
remote: {
video: document.getElementById('remoteMedia'),
//audio: remoteAudioRef.current
}
},
userAgentOptions: {
logBuiltinEnabled: true,
logLevel: "debug",
authorizationPassword: password,
authorizationUsername: username,
uri: urI,
noAnswerTimeout : 30,
displayName: name,
displayImage: profileImage,
displayRoomId: `hi${displayRoomId}`
},
};
const simpleUserObj = new Web.SessionManager('wss://pbx.scinner.com:8089/ws', simpleUserOptions);
if(!simpleUserObj.isConnected()){
simpleUserObj
.connect()
.then(() => {
console.log(`${user.username} connected`);
simpleUserObj.register().then(() => {
console.log(`${user.username} registerd`);
}).catch((error) => {
alert("Failed to register.\n" + error);
});
})
.catch((error) => {
alert("Failed to connect.\n" + error);
});
setIsSARegistered(true);
setSimpleUser(simpleUserObj);
setCallerUserAgent
}else{
console.log('isconnected');
setIsSARegistered(true);
}
/**
Set calling
*/
const setCalling = (name, target) => {
simpleUser
.call(target, {
sessionDescriptionHandlerOptions: {
constraints: {
audio: true,
video: true
}
},
inviteWithoutSdp: false
}).then(() => {
console.log(`anon placed a call`);
}).catch((error) => {
console.error(`[${simpleUser.id}] failed to place call`);
console.error(error);
alert("Failed to place call.\n" + error);
});
//setIsCallling(true);
// console.log('isCallling', isCallling)
}
}
const answerCall = () => {
//callSession stored in local state
if (callSession) {
simpleUser.answer(callSession).then(() => {
console.log(`call answered`);
}).catch((error) => {
console.error(`call answered failed`);
console.error(error);
// alert("Failed to place call.\n" + error);
});
}
};
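The circular-reference part can be worked around with a replacer that skips objects it has already seen. A small sketch follows (getCircularReplacer is my own helper name, not part of SIP.js); keep in mind that what you get back after a reload is plain data, not a live Invitation you can answer:
// Sketch: JSON.stringify with a replacer that drops circular references,
// e.g. inside onCallReceived. The parsed result is only good for display
// and bookkeeping; it cannot be revived into a SIP.js Invitation.
const getCircularReplacer = () => {
    const seen = new WeakSet();
    return (key, value) => {
        if (typeof value === "object" && value !== null) {
            if (seen.has(value)) return undefined; // drop circular links
            seen.add(value);
        }
        return value;
    };
};

localStorage.setItem('callerInvitation', JSON.stringify(invitation, getCircularReplacer()));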
I am trying to make a web app with audio/video calling using WebRTC.
The problem is that local audio/video works properly in my web app, but the remote audio/video stream is never received on the other side. There are no errors in the console; you can join a room, but you can't hear other participants' audio or see their video.
Here's the code:
useEffect(() => {
const initRoom = async () => {
socket.current = socketInit();
//Get User Audio
await captureLocalMedia();
socket.current.emit(ACTIONS.JOIN, {roomId, user});
socket.current.on(ACTIONS.ADD_PEER, handleNewPeerConnection);
async function captureLocalMedia() {
localMediaStream.current =
await navigator.mediaDevices.getUserMedia({
video: true,
audio: true,
});
}
async function handleNewPeerConnection({peerId, createOffer, user: newUser}) {
if(peerId in connections.current) {
return console.warn(`You are already joined with ${user.username}`)
}
var configuration = {
offerToReceiveAudio: true
}
connections.current[peerId] = new RTCPeerConnection({
iceServers: [
{
urls: "stun:stun.l.google.com:19302"
},
{
urls: "stun:stun1.l.google.com:19302"
},
{
urls: "stun:stun2.l.google.com:19302"
},
{
urls: "stun:stun3.l.google.com:19302"
},
{
urls: "stun:stun4.l.google.com:19302"
}
],
configuration: configuration
})
connections.current[peerId].ontrack = (event) => {
addNewClients(newUser, () => {
if(audioElements.current[newUser.id]) {
audioElements.current[newUser.id].srcObject = event.streams[0];
} else {
let settled = false;
const interval = setInterval(() => {
if(audioElements.current[newUser.id]) {
const [remoteStream] = event.streams;
audioElements.current[newUser.id].srcObject=remoteStream
settled = true;
}
if (settled) {
clearInterval(interval)
}
}, 600)
}
})
}
localMediaStream.current.getTracks().forEach((track) => {
connections.current[peerId].addTrack(
track,
localMediaStream.current
)
});
if(createOffer) {
const offer = await connections.current[peerId].createOffer()
await connections.current[peerId].setLocalDescription(offer)
socket.current.emit(ACTIONS.RELAY_SDP, {
peerId,
sessionDescription: offer
})
}
}
}
initRoom();
return () => {
localMediaStream.current
.getTracks()
.forEach((track) => track.stop());
socket.current.emit(ACTIONS.LEAVE, { roomId });
for (let peerId in connections.current) {
connections.current[peerId].close();
delete connections.current[peerId];
delete audioElements.current[peerId];
}
socket.current.off(ACTIONS.ADD_PEER);
}
}, [])
This is the socketInit function:
import {io} from 'socket.io-client';
const socketInit = () => {
const options = {
'force new connection': true,
reconnectionAttempts: 'Infinity',
timeout: 10000,
transports: ['websocket'],
};
return io('http://localhost:5500', options)
};
export default socketInit;
You should check whether the offer's SDP contains information about media tracks. For example:
v=0
o=- 4748410946812024893 2 IN IP4 127.0.0.1
............
a=sendrecv
a=msid:Eei3sKzfsiJybxa4TYhANjGsFMuWe2lAxadS f798f673-566e-4a8e-9760-8d657d031acf
............
a=rtpmap:126 telephone-event/8000
a=ssrc:3563088629 cname:0j/yv49mmBxgcAbW
a=ssrc:3563088629 msid:Eei3sKzfsiJybxa4TYhANjGsFMuWe2lAxadS f798f673-566e-4a8e-9760-8d657d031acf
a=ssrc:3563088629 mslabel:Eei3sKzfsiJybxa4TYhANjGsFMuWe2lAxadS
a=ssrc:3563088629 label:f798f673-566e-4a8e-9760-8d657d031acf
............
a=max-message-size:262144
If the remote peer got the information about the media tracks and it still doesn't work, then the problem is probably with playback of the HTMLMediaElement. Try adding the line:
audioElements.current[newUser.id].autoplay = true
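Browsers can also block autoplay with sound, so it may help to set the element up explicitly and handle the promise returned by play(). A small sketch using the same audioElements ref and track event as above:
// Sketch: make sure the remote element actually starts playing.
// Autoplay with audio can be blocked until a user gesture, so catch the
// rejection from play() and retry after a click if necessary.
const el = audioElements.current[newUser.id];
el.autoplay = true;
el.playsInline = true; // only meaningful if the element is a <video>
el.srcObject = event.streams[0];
el.play().catch((err) => {
    console.warn('Playback blocked, waiting for user interaction', err);
});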
I'm trying to start a WebRTC call with AWS Kinesis, but the demo in the AWS Kinesis JavaScript docs only shows how to join the call as a VIEWER, not as the MASTER.
I can't find a clear example anywhere online, and I've spent hours on it with my teammate.
I can see and hear myself, so I know I'm getting the hardware working correctly, but we can't see or hear each other. I know it's going to be something simple, but I just can't figure out where I'm going wrong with the connection.
const startKinesisCall = async () => {
const coachingSession = new AWS.KinesisVideo({
region,
accessKeyId,
secretAccessKey,
correctClockSkew: true
});
// Get Signaling Channel Endpoints
// Each signaling channel is assigned an HTTPS and WSS endpoint to connect to for
// data-plane operations. These can be discovered using the GetSignalingChannelEndpoint API.
const getSignalingChannelEndpointResponse = await coachingSession.getSignalingChannelEndpoint({
ChannelARN: channelARN,
SingleMasterChannelEndpointConfiguration: {
Protocols: ['WSS', 'HTTPS'],
Role: Role.VIEWER
}
}).promise();
const endpointsByProtocol = getSignalingChannelEndpointResponse?.ResourceEndpointList?.reduce((endpoints, endpoint) => {
endpoints[endpoint.Protocol] = endpoint?.ResourceEndpoint;
return endpoints;
}, {});
// Create KVS Signaling Client
// The HTTPS endpoint from the GetSignalingChannelEndpoint response is used with this client.
// This client is just used for getting ICE servers, not for actual signaling.
const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
region,
accessKeyId,
secretAccessKey,
endpoint: endpointsByProtocol.HTTPS,
correctClockSkew: true,
});
// Get ICE server configuration
// For best performance, we collect STUN and TURN ICE server configurations.
// The KVS STUN endpoint is always stun:stun.kinesisvideo.${region}.amazonaws.com:443.
// To get TURN servers, the GetIceServerConfig API is used.
const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
.getIceServerConfig({
ChannelARN: channelARN,
}).promise();
const iceServers = [{ urls: `stun:stun.kinesisvideo.${region}.amazonaws.com:443` }];
getIceServerConfigResponse.IceServerList.forEach(iceServer =>
iceServers.push({
urls: iceServer.Uris,
username: iceServer.Username,
credential: iceServer.Password,
}),
);
console.log('ICE SERVERS: ', iceServers);
// Create RTCPeerConnection
// The RTCPeerConnection is the primary interface for WebRTC communications in the Web.
const peerConnection = new RTCPeerConnection({ iceServers });
// Create WebRTC Signaling Client
// This is the actual client that is used to send messages over the signaling channel.
const signalingClient = new SignalingClient({
channelARN,
channelEndpoint: endpointsByProtocol.WSS,
role: Role.MASTER,
region,
clientId,
credentials: {
accessKeyId,
secretAccessKey,
},
systemClockOffset: coachingSession.config.systemClockOffset
});
// GET THE USER MEDIA DEVICES
const localStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).catch(e => {
console.log("COULD NOT FIND WEBCAM");
setShowErrorStartingVideoModal(true);
});
// *** AUDIO & VIDEO DEVICE COLLECTION ***
let audioInputDevices: MediaDeviceInfo[];
let audioOutputDevices: MediaDeviceInfo[];
let videoInputDevices: MediaDeviceInfo[];
try {
const mediaDevices = await navigator.mediaDevices.enumerateDevices();
audioInputDevices = mediaDevices.filter(device => device.kind === 'audioinput');
audioOutputDevices = mediaDevices.filter(device => device.kind === 'audiooutput');
videoInputDevices = mediaDevices.filter(device => device.kind === 'videoinput');
setMicrophoneList(audioInputDevices);
setSpeakerList(audioOutputDevices);
setCameraList(videoInputDevices);
} catch (e) {
console.log(e);
console.log("ERROR COLLECTING MEDIA DEVICE INFORMATION: MAKE SURE PERMISSIONS ARE ALLOWED AND TRY AGAIN");
};
// GRAB THE LOCAL PROVIDER AND PATIENT VIDEO TILES
const providerVideoTile: HTMLVideoElement = document.getElementById('provider-video-element') as HTMLVideoElement;
const patientVideoElement = document.getElementById('patient-video-element') as HTMLVideoElement;
// let dataChannel: RTCDataChannel
// Add Signaling Client Event Listeners
signalingClient.on('open', async () => {
if (!localStream || !peerConnection) return;
// Get a stream from the webcam, add it to the peer connection, and display it in the local view
try {
localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));
providerVideoTile.srcObject = localStream;
} catch (e) {
// Could not find webcam
console.log(e);
return;
};
// Create an SDP offer and send it to the master
const offer = await peerConnection.createOffer({
offerToReceiveAudio: true,
offerToReceiveVideo: true
});
console.log('CREATED OFFER: ', offer);
await peerConnection.setLocalDescription(offer);
if (peerConnection.localDescription) signalingClient.sendSdpOffer(peerConnection.localDescription, patient.patientID);
});
// When the SDP answer is received back from the master, add it to the peer connection.
signalingClient.on('sdpAnswer', async answer => {
console.log('RECEIVED ANSWER: ', answer);
if (!peerConnection) return;
await peerConnection.setRemoteDescription(answer).catch(e => console.log(e));
});
signalingClient.on('sdpOffer', async (offer, senderClientID) => {
console.log({ offer });
if (!peerConnection) return;
await peerConnection.setRemoteDescription(offer).catch(e => console.log(e));
console.log('REMOTE DESCRIPTION SET: ', peerConnection);
const answer = await peerConnection.createAnswer().catch(e => console.log(e));
console.log({ answer });
if (answer) signalingClient.sendSdpAnswer(answer, senderClientID);
// dataChannel = peerConnection.createDataChannel(`data-channel-of-${senderClientID}`);
// dataChannel.addEventListener("open", (event) => {
// console.log(event);
// dataChannel.send('******HI ALEC*******');
// });
});
// When an ICE candidate is received from the master, add it to the peer connection.
signalingClient.on('iceCandidate', async (candidate, senderClientID) => {
if (!peerConnection) return;
console.log('new iceCandidate received:', candidate);
await peerConnection.addIceCandidate(candidate).catch(e => console.log(e));
console.log("ICE CANDIDATE ADDED: ", candidate);
});
signalingClient.on('close', async () => {
if (!localStream) return;
// Handle client closures
console.log("ENDING THE CALL");
localStream.getTracks().forEach(track => track.stop());
peerConnection.close();
if ('srcObject' in providerVideoTile) providerVideoTile.srcObject = null;
});
signalingClient.on('error', error => {
// Handle client errors
console.log(error);
});
signalingClient.on('chat', (dataMessage: any) => {
const decodedMessage = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
console.log("GOT TEST MESSAGE:", decodedMessage);
});
signalingClient.on('SeriesData', (dataMessage: any) => {
const seriesFromMobile = JSON.parse(UTF8Decoder.decode(new Uint8Array(dataMessage.data)));
console.log("SERIES FROM MOBILE:", seriesFromMobile);
kickOffSeriesCreation(seriesFromMobile);
});
signalingClient.on('EffortMarker', (dataMessage: any) => {
const effortMarker = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
console.log("EFFORT MARKER:", effortMarker);
setEffortMarker(effortMarker);
});
signalingClient.on('CoachingMessage', async (dataMessage: any) => {
const coachingMessage = UTF8Decoder.decode(new Uint8Array(dataMessage.data));
console.log("COACHING MESSAGE FROM MOBILE:", coachingMessage);
if (coachingMessage === 'EndSeries') {
await handleForceEndEffort(signalingClient);
await handleEndSeries(signalingClient);
};
});
// Add Peer Connection Event Listeners
// Send any ICE candidates generated by the peer connection to the other peer
peerConnection.addEventListener('icecandidate', ({ candidate }) => {
if (candidate) {
console.log(candidate);
signalingClient.sendIceCandidate(candidate, patient.patientID);
} else {
// No more ICE candidates will be generated
console.log('NO MORE ICE CANDIDATES WILL BE GENERATED');
}
});
// As remote tracks are received, add them to the remote view
peerConnection.addEventListener('track', event => {
// if (patientVideoElement.srcObject) return;
setNoPatientConnected(false);
console.log({ event });
try {
peerConnection.addTrack(event.track, event.streams[0]);
if (event.track.kind === 'video') patientVideoElement.srcObject = event.streams[0];
} catch (e) {
console.log(e);
}
});
// Open Signaling Connection
signalingClient.open();
};
Try this page. You can use master on one computer and viewer on the other.
https://awslabs.github.io/amazon-kinesis-video-streams-webrtc-sdk-js/examples/index.html
For anyone else with the same issue: I managed to find the master example in this GitHub repo and was able to get it working.
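For reference, the main differences on the MASTER side are that the endpoints are requested with Role.MASTER, clientId is omitted, and the master answers offers instead of sending one on 'open'. A trimmed sketch reusing only the calls already shown in the question (coachingSession, channelARN, region, credentials, endpointsByProtocol, peerConnection and localStream are assumed to exist as above):
// Sketch of the MASTER flow, assuming the same coachingSession, channelARN, region,
// credentials, peerConnection and localStream as in the question above.
const endpointResponse = await coachingSession.getSignalingChannelEndpoint({
    ChannelARN: channelARN,
    SingleMasterChannelEndpointConfiguration: {
        Protocols: ['WSS', 'HTTPS'],
        Role: Role.MASTER // master endpoints, not VIEWER
    }
}).promise();
// ...reduce endpointResponse.ResourceEndpointList into endpointsByProtocol as in the question...

const signalingClient = new SignalingClient({
    channelARN,
    channelEndpoint: endpointsByProtocol.WSS,
    role: Role.MASTER, // no clientId on the master side
    region,
    credentials: { accessKeyId, secretAccessKey },
    systemClockOffset: coachingSession.config.systemClockOffset
});

signalingClient.on('open', () => {
    // The master only attaches its media and waits; the viewer sends the SDP offer.
    localStream.getTracks().forEach(track => peerConnection.addTrack(track, localStream));
});

signalingClient.on('sdpOffer', async (offer, remoteClientId) => {
    await peerConnection.setRemoteDescription(offer);
    const answer = await peerConnection.createAnswer();
    await peerConnection.setLocalDescription(answer);
    signalingClient.sendSdpAnswer(peerConnection.localDescription, remoteClientId);
    // Send ICE candidates back to this specific viewer.
    peerConnection.addEventListener('icecandidate', ({ candidate }) => {
        if (candidate) signalingClient.sendIceCandidate(candidate, remoteClientId);
    });
});

signalingClient.on('iceCandidate', (candidate, remoteClientId) => {
    peerConnection.addIceCandidate(candidate);
});

signalingClient.open();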
Running into an issue with React/Socket.io. I have two different socket emitters/listeners: one for a chat, and one for keeping track of live changes to the application. I have two separate windows running localhost. The issue is that when I emit a change in one window, the other window receives that change the first time but never again (i.e. it gets the first chat message but none that follow). After that first emit/receive, the sending client starts to receive its own emits.
Front-end code:
socket = io("localhost:3002");
componentDidMount() {
//get id from url
const { id } = this.props.match.params;
//join specific room for project
this.socket.on("connect", () => {
this.socket.emit("room", this.projectId);
});
//listener for incoming messages
this.socket.on("RECEIVE_MESSAGE", (data) => {
this.props.addChat(this.projectId, data);
});
this.socket.on("UPDATE_PROJECT", () => {
console.log("update");
this.props.fetchProject(id);
});
}
emitTaskChange = () => {
this.socket.emit("TASK_CHANGE", { data: null });
};
onChatSubmit = (e) => {
e.preventDefault();
//create object with current user as author, message, and a timestamp
const chat = {
author: this.props.currentUser.name,
message: this.state.newChat,
createdAt: new Date().toLocaleString(),
};
//send message through socket
this.socket.emit("SEND_MESSAGE", chat);
//call action creator to add new chat
this.props.addChat(this.projectId, chat);
this.setState({ currentMessage: "" });
};
handleTaskEdit = (taskId, currentStatus) => {
const newStatus = currentStatus === "todo" ? "inprogress" : "completed";
this.props.editTask(this.projectId, taskId, newStatus);
this.emitTaskChange();
};
Back-end code:
const io = socket(server);
//create separate chat rooms using project id
io.on("connection", (socket) => {
socket.on("room", (room) => {
socket.join(room);
socket.in(room).on("SEND_MESSAGE", (message) => {
socket.emit("RECEIVE_MESSAGE", message);
});
socket.in(room).on("TASK_CHANGE", (data) => {
socket.emit("UPDATE_PROJECT", data);
});
});
Found the error: I had to change the server-side code away from socket.on and instead use the io object that was initialized, e.g. io.sockets.on.
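A hedged sketch of one shape that fix can take (not the poster's exact code): register the chat/task listeners once per connected socket and broadcast to the room, instead of re-registering them inside the "room" handler.
// Sketch: per-connection listeners plus room broadcast with socket.to(room).
// Storing the joined room on the socket is my own convention here.
io.on("connection", (socket) => {
    socket.on("room", (room) => {
        socket.join(room);
        socket.room = room;
    });
    socket.on("SEND_MESSAGE", (message) => {
        socket.to(socket.room).emit("RECEIVE_MESSAGE", message);
    });
    socket.on("TASK_CHANGE", (data) => {
        socket.to(socket.room).emit("UPDATE_PROJECT", data);
    });
});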
So I am using jssip 3.2.10 to make calls on a React project.
The server is set up with Asterisk on CentOS.
I can make calls where the call receiver hears me well, but I can't hear their audio, nor the usual ringing (ringback) tone it should play until the call is picked up.
It does work with some sipml5/Asterisk UDP online tests, so I feel the issue is on my client's side. I tested it on Chrome and Firefox (both latest, with the same results).
My setup
I have a helper to connect called sip.js:
const JsSIP = require('jssip')
const GLOBAL = require('../globals')
function register(user, pass, cb) {
console.log('Registering to SIP')
JsSIP.debug.disable('JsSIP:*')
const address = GLOBAL.jssip_server + ':' + GLOBAL.jssip_port
let socket = new JsSIP.WebSocketInterface('ws://' + address + '/ws')
const configuration = {
sockets: [socket],
uri: 'sip:' + user + '#' + GLOBAL.jssip_server,
authorization_user: user,
password: pass,
connection_recovery_min_interval: 3,
register: true
}
let ua = new JsSIP.UA(configuration)
ua.start()
cb(ua)
}
export {
register
}
Then on my main component I do the following:
componentDidMount() {
if(GLOBAL.jssip) {
this.props.dispatch(connecting(true))
register('***', '***', (ua) => {
this.setState({ua: ua}, () => {
this.state.ua.on("registered", () => {
this.props.dispatch(connecting(false))
this.setState({critical: false})
})
this.state.ua.on("registrationFailed", () => {
this.props.dispatch(connecting(false))
this.setState({critical: true})
})
})
})
}
}
And when I try to make a call I do the following:
doCall(number) {
this.props.dispatch(placeCall(call))
if(GLOBAL.jssip) {
let eventHandlers = {
'connecting': (e) => {
console.log('call is in progress')
this.setState({sipStatus: "connecting"})
},
'progress': (e) => {
console.log('call is in progress')
this.setState({sipStatus: "progress"})
},
'failed': (e) => {
console.log('call failed with cause: ', e)
this.setState({sipStatus: "failed"})
},
'ended': (e) => {
console.log('call ended with cause: ', e)
this.setState({sipStatus: "ended"})
},
'confirmed': (e) => {
this.setState({sipStatus: "confirmed"})
}
}
let options = {
eventHandlers: eventHandlers,
mediaConstraints: { 'audio': true, 'video': false }
}
let session = this.state.ua.call('sip:'+number+'#'+GLOBAL.jssip_server, options)
}
}
Does anyone have a clue how to fix this?
Thanks to the answer here:
How to handle audio stream in JsSIP?
I found the solution: I needed to add the following to the file that renders the call:
<audio ref={(audio) => {this.audioElement = audio}} id="audio-element"></audio>
And changed the last bit of doCall to this:
this.setState({session: this.state.ua.call('sip:'+number+'#'+GLOBAL.jssip_server, options)}, () =>{
this.state.session.connection.addEventListener('addstream', (event: any) => {
this.audioElement.srcObject = event.stream
this.audioElement.play()
})
})
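Note that 'addstream' is a deprecated event; with the standard 'track' event the same wiring would look roughly like this (same session and audioElement as above):
// Sketch using the standard 'track' event instead of the deprecated 'addstream'.
this.state.session.connection.addEventListener('track', (event) => {
    this.audioElement.srcObject = event.streams[0]
    this.audioElement.play()
})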