Web MediaRecorder event triggers too late - reactjs

I am working on a web app built with React.js and Flask that performs speech recognition and needs to pass both the transcribed message and the recorded audio to a handler.
My problem is that this.parse(transcript, audio) always receives null for audio. Debugging in Chrome shows that mediaRecorder.onstop and mediaRecorder.ondataavailable only fire after this.parse(transcript, audio) has already run, so nothing is ever assigned to audio.
Is there a way to capture the audio from mediaRecorder before this.parse(transcript, audio) executes?
Also, which variable (audio, audioURL, blob, ...) should I pass as the actual audio file?
class MessageParser {
    constructor(actionProvider, state) {
        var audio = null, blob = null, audioURL = null, mediaRecorder = null;
        let chunks = [];
        this.actionProvider = actionProvider;
        this.state = state;
        this.recognition = new SpeechRecognition();
        this.recognition.continuous = false;
        this.recognition.interimResults = false;
        this.recognition.lang = 'en-US';
        this.recognition.maxAlternatives = 1;
        this.recognition.start();
        this.recognition.onstart = function () {
            if (navigator.mediaDevices.getUserMedia) {
                //console.log('getUserMedia supported.');
                let chunks = [];
                var options = {
                    audioBitsPerSecond: 128000,
                    mimeType: 'audio/webm'
                };
                navigator.mediaDevices.getUserMedia({ audio: true })
                    .then(function (stream) {
                        mediaRecorder = new MediaRecorder(stream, options);
                        mediaRecorder.start();
                    });
            } else {
                console.log('getUserMedia unsupported.');
            }
        };
        this.recognition.onresult = (e) => {
            const transcript = e.results[0][0].transcript;
            var audioURL = null;
            this.recognition.abort();
            mediaRecorder.stop();
            mediaRecorder.onstop = function (e) {
                console.log("data available after MediaRecorder.stop() called.");
                audio = document.createElement('audio');
                blob = new Blob(chunks, { type: 'audio/webm; codecs=opus' });
                audioURL = window.URL.createObjectURL(blob);
                audio.src = audioURL;
                console.log("recorder stopped");
                const recording = new Audio(audioURL);
                recording.play();
            };
            mediaRecorder.ondataavailable = function (e) {
                chunks.push(e.data);
            };
            this.parse(transcript, audio);
        };
    }
}
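Since MediaRecorder.stop() is asynchronous, one approach (a minimal sketch, not the only possible fix) is to attach the handlers before calling stop() and wrap the stop event in a Promise, deferring this.parse until the Blob actually exists:

// Sketch: resolve with the recorded Blob once the recorder has flushed its
// data. Handlers are attached *before* stop() so no event can be missed.
function stopAndGetBlob(mediaRecorder, chunks) {
    return new Promise((resolve) => {
        mediaRecorder.ondataavailable = (e) => chunks.push(e.data);
        mediaRecorder.onstop = () =>
            resolve(new Blob(chunks, { type: 'audio/webm; codecs=opus' }));
        mediaRecorder.stop();
    });
}

// Inside recognition.onresult:
// stopAndGetBlob(mediaRecorder, chunks)
//     .then((blob) => this.parse(transcript, blob));

As for which variable to pass: the Blob is the actual audio data, and it is what you would append to a FormData and POST to the Flask endpoint; audioURL is only an object URL for local playback, and audio is just a detached <audio> element.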

Related

Memory heap keeps increasing using ag-grid, angular and websocket

We needed to build a live-monitor type of screen that receives its feed through a WebSocket; Angular 11 is used for the UI. When the page is left open in Chrome for a few minutes, the memory heap starts growing and keeps growing. After some time the application hangs and we can't navigate to another page of the application.
I'm unable to find the cause of the memory leak, if there is one.
HTML Code:
<ag-grid-angular #LiveHedgeGrid
    class="ag-theme-balham"
    [rowData]="hedgeRowData"
    [columnDefs]="hedgeColumn"
    (gridReady)="onLiveHedgeReady($event)"
    (columnRowGroupChanged)="oncolumnRowGroupChanged($event)"
    (gridSizeChanged)="onGridSizeChanged($event)"
    [enableCellChangeFlash]="true"
    [rowBuffer]="10"
    [debounceVerticalScrollbar]="true"
    [suppressColumnVirtualisation]="true"
    [groupIncludeTotalFooter]="true"
    [gridOptions]="gridOptions"
    [suppressAggFuncInHeader]="true"
    [groupDefaultExpanded]="groupDefaultExpanded"
    [domLayout]="domLayout">
</ag-grid-angular>
TypeScript Code:
websocketCall() {
    let socket = new WebSocket(ApiService.webSocketUrl);
    socket.onopen = e => {
    };
    socket.onmessage = e => {
        let server_message;
        try {
            server_message = JSON.parse(e.data);
            server_message = JSON.parse(server_message);
            if (server_message instanceof Array) {
                this.bindTableValues(server_message);
            } else {
                this.bindTableValues([server_message]);
            }
        } catch (e) {
            this.bindTableValues(server_message);
        }
        // console.log('socket open');
    };
    socket.onclose = () => {
        //console.log('Web Socket Connection Closed');
    };
}

async bindTableValues(server_message) {
    await server_message.forEach(element => {
        this.ricData = {};
        let ricPeriod = '';
        let itemsToUpdate = [];
        let data = {};
        let value = 0;
        let ricData = this.ricList[element['RIC']];
        if (ricData) {
            if (ricData['type'] == 'swap') {
                value = element['Fields']['NETCHNG_1'];
                ricPeriod = ricData['disp_name'];
                ricPeriod = ricPeriod.toString().trim().substring(0, ricPeriod.length - 1).toLowerCase();
                if (value) {
                    //const itemsToUpdate: any[] = [];
                    this.gridApi.forEachNodeAfterFilterAndSort((rowNode) => {
                        if (!rowNode.group) {
                            data = rowNode.data;
                            if (data['Tenor'] == ricPeriod) {
                                data['LivePnL'] = parseFloat(data['DV01']) * value * 100;
                                itemsToUpdate.push(data);
                            }
                        }
                    });
                    // this.gridApi.applyTransaction({ update: itemsToUpdate })!;
                    // this.gridApi.applyTransactionAsync({ update: itemsToUpdate })!;
                    this.gridApi.batchUpdateRowData({ update: itemsToUpdate })!;
                }
            }
        }
    });
}

ngOnDestroy(): void {
    try {
        //console.log('Destroy ' + this.socket.readyState);
        // if (this.socket.readyState === WebSocket.OPEN) {
        if (this.socket.readyState === 1) {
            this.socket.close();
        }
        this.getRic.unsubscribe();
        this.getTable.unsubscribe();
    }
    catch (e) {
        console.log(e);
    }
}
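One detail worth checking in the code above: websocketCall() stores the connection in a local variable (let socket), while ngOnDestroy() closes this.socket, so the connection opened here may never be closed, and its onmessage closure (plus everything it retains) can stay alive across navigations. A minimal sketch of the cleanup, assuming the rest of the component stays as shown:

websocketCall() {
    // Keep the socket on the component so ngOnDestroy can actually close it.
    this.socket = new WebSocket(ApiService.webSocketUrl);
    this.socket.onmessage = e => {
        let serverMessage;
        try {
            serverMessage = JSON.parse(e.data);
            serverMessage = JSON.parse(serverMessage); // the feed appears to be double-encoded JSON
            this.bindTableValues(Array.isArray(serverMessage) ? serverMessage : [serverMessage]);
        } catch (err) {
            this.bindTableValues(serverMessage);
        }
    };
}

ngOnDestroy() {
    if (this.socket) {
        this.socket.onmessage = null; // drop the closure so it can be collected
        this.socket.close();
    }
    this.getRic.unsubscribe();
    this.getTable.unsubscribe();
}

It may also help to switch from batchUpdateRowData (deprecated in newer ag-grid versions) to the commented-out applyTransactionAsync call, which coalesces rapid updates internally instead of re-rendering on every WebSocket message.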

How to encode a buffer to send it to the server (Icecast) using mediaDevices.getUserMedia

I am trying to stream my browser (Chrome) microphone through Icecast.
I think my problem is that I need to encode the output data.
How can I take an audio stream captured in the browser and stream it live via Icecast? I'm using Liquidsoap.
I want to send correctly encoded output to the server via WebSocket.
start.addEventListener("click", () => {
    socket = new WebSocket(localURL, "webcast");
    socket.onopen = function () {
        socket.send(
            JSON.stringify({
                type: "hello",
                data: hello,
            })
        );
    };
    navigator.mediaDevices.getUserMedia(constraintObj).then((stream) => {
        const context = new AudioContext();
        stream.getTracks().forEach((track) => (track.enabled = true));
        var source = context.createMediaStreamSource(stream);
        var processor = source.context.createScriptProcessor(4096, 2, 2);
        source.connect(processor);
        processor.connect(context.destination);
        processor.onaudioprocess = function (e) {
            // get mic data
            var left = e.inputBuffer.getChannelData(0);
            sendData(left);
        };
    });
});

const sendData = function (data) {
    if (!((data != null ? data.length : void 0) > 0)) {
        return;
    }
    if (!(data instanceof ArrayBuffer)) {
        data = data.buffer.slice(
            data.byteOffset,
            data.length * data.BYTES_PER_ELEMENT
        );
    }
    return socket.send(data);
};
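The onaudioprocess callback above hands raw Float32 PCM to the socket, which Icecast will not understand as-is. A minimal sketch of one alternative, letting MediaRecorder do the encoding and sending the already-encoded chunks over the same WebSocket (this assumes your Liquidsoap harbor/webcast input is configured to accept Opus-in-WebM; check your server config):

start.addEventListener("click", () => {
    navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
        // MediaRecorder performs the encoding, so no manual PCM handling is needed.
        const recorder = new MediaRecorder(stream, { mimeType: "audio/webm;codecs=opus" });
        recorder.ondataavailable = (e) => {
            if (e.data.size > 0 && socket.readyState === WebSocket.OPEN) {
                socket.send(e.data); // each chunk is already-encoded WebM/Opus data
            }
        };
        recorder.start(1000); // emit an encoded chunk roughly every second
    });
});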

I'd like to loop this array of videos and images; how can I do it?

In this demo http://jsfiddle.net/mdz82oLn/ I have inserted videos (IDs) and images (png, jpeg, etc.), which can also be navigated with customized buttons. Playback works perfectly, but when the last video in the list ends, the player gets stuck on it instead of restarting from the first video in the list. How can I add this looping behavior?
var tag = document.createElement('script');
tag.src = "https://www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);

const playerElement = document.querySelector('#player');
const imageElement = document.querySelector('#slide');

const videos = {
    'RGpr3Y6Q-1M': 'http://nothingbutgeek.com/wp-content/uploads/2018/06/automata_16x9.png',
    'btxdcqLOGuc': 'https://live.staticflickr.com/2400/2078946248_d063d5a563_b.jpg',
    'CIx0a1vcYPc': 'https://i.ytimg.com/vi/CIx0a1vcYPc/maxresdefault.jpg',
};
const videoIds = Object.keys(videos);

function onYouTubeIframeAPIReady() {
    function onPlayerReady({ target }) {
        var playButton = document.getElementById("play-button");
        playButton.addEventListener("click", function () {
            target.playVideo();
        });
        var pauseButton = document.getElementById("pause-button");
        pauseButton.addEventListener("click", function () {
            target.pauseVideo();
        });
        var next = document.getElementById("next");
        next.addEventListener("click", function () {
            target.nextVideo();
        });
        var pre = document.getElementById("previous");
        pre.addEventListener("click", function () {
            target.previousVideo();
        });
        target.loadPlaylist({
            playlist: videoIds
        });
    }

    function onPlayerStateChange({ data, target }) {
        switch (data) {
            case YT.PlayerState.ENDED:
                target.nextVideo();
                break;
            case YT.PlayerState.BUFFERING:
                const playlist = target.getPlaylist();
                const playlistIndex = target.getPlaylistIndex();
                const currentId = playlist[playlistIndex];
                const image = videos[currentId];
                if (imageElement.src !== image) {
                    imageElement.src = image;
                }
                break;
        }
    }

    const player = new YT.Player(playerElement, {
        height: '405',
        width: '720',
        playerVars: {
            controls: 1,
        },
        events: {
            'onReady': onPlayerReady,
            'onStateChange': onPlayerStateChange
        }
    });
}
If I understand correctly, you are stuck on the last video and want playback to loop back to the first one.
In that case a change like this should work:
var next = document.getElementById("next");
next.addEventListener("click", function () {
    target.nextVideo();
    if (target.getPlaylistIndex() == videoIds.length - 1) {
        target.loadPlaylist({
            playlist: videoIds
        });
    }
});
I've updated your example accordingly.
You can also update the switch case like this:
case YT.PlayerState.ENDED: {
    if (target.getPlaylistIndex() == videoIds.length - 1) {
        target.loadPlaylist({
            playlist: videoIds
        });
    }
    break;
}
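Alternatively, the IFrame Player API can loop a playlist natively via setLoop, which removes the need for index checks; a minimal sketch, reusing the onPlayerReady shape from the question:

function onPlayerReady({ target }) {
    target.loadPlaylist({ playlist: videoIds });
    target.setLoop(true); // after the last video ends, restart from the first
}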

video recording issue in reactjs

I have implemented video recording functionality in React using MediaDevices.getUserMedia(), but the audio quality is not good: the recording picks up a lot of annoying background noise.
link:https://devionashell.azurewebsites.net/uco
startVideos = async () => {
    stopCountdown = false;
    this.setState({
        open: true
    });
    var constraints = {
        audio: {
            sampleRate: 44800,
            channelCount: 2,
            volume: 0.2,
            autoGainControl: false,
            echoCancellation: false,
            noiseSuppression: false,
            googleAutoGainControl: false,
            sampleSize: 16
        },
        video: {
            facingMode: "environment"
        }
    };
    navigator.mediaDevices.getUserMedia = navigator.mediaDevices.getUserMedia ||
        navigator.mediaDevices.webkitGetUserMedia ||
        navigator.mediaDevices.mozGetUserMedia;
    if (navigator.mediaDevices.getUserMedia) {
        const stream = await navigator.mediaDevices.getUserMedia(constraints);
        // show it to user
        vid = stream;
        this.video.srcObject = stream;
        this.video.volume = 0.1;
        // var vid_volume = document.getElementById("myVideo");
        console.log('vid_volume');
        // console.log(vid_volume)
        console.log('vid_volume');
        //this.video.volumeObject = 0.2;
        this.video.play();
        // init recording
        this.mediaRecorder = new MediaRecorder(stream, {
            mimeType: videoType,
        });
        // init data storage for video chunks
        this.chunks = [];
        // listen for data from media recorder
        this.mediaRecorder.ondataavailable = e => {
            if (e.data && e.data.size > 0) {
                this.chunks.push(e.data);
            }
        };
        this.render();
    } else {
        console.log("getUserMedia not supported");
    }
};
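Note that the constraints above explicitly turn off echoCancellation, noiseSuppression, and autoGainControl, which are the browser's built-in defenses against exactly this kind of background noise. A minimal sketch with them re-enabled (same constraint shape as the question; tune from there):

// inside an async function such as startVideos above
var constraints = {
    audio: {
        echoCancellation: true,  // cancel speaker bleed-through
        noiseSuppression: true,  // filter out steady background noise
        autoGainControl: true,   // keep the input level consistent
        channelCount: 2,
        sampleSize: 16
    },
    video: {
        facingMode: "environment"
    }
};
const stream = await navigator.mediaDevices.getUserMedia(constraints);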

Uncaught SyntaxError: Identifier 'socket' has already been declared in client file

I've created a React app where I'm using Socket.IO to connect to the Google Cloud Platform. When I run the client and the server I get the following error in the client file:
Uncaught SyntaxError: Identifier 'socket' has already been declared
I'm not quite sure why, since socket is a constant that is not declared globally; but maybe that's not why it's failing.
The following is the code of client.js and the structure of the project:
//connection to socket
const io = require('socket.io-client');
const socket = io.connect('http://localhost:1337');

// ================= visibilitychange =================
// Set the name of the hidden property and the change event for visibility
var hidden, visibilityChange;
if (typeof document.hidden !== "undefined") { // Opera 12.10 and Firefox 18 and later support
    hidden = "hidden";
    visibilityChange = "visibilitychange";
} else if (typeof document.msHidden !== "undefined") {
    hidden = "msHidden";
    visibilityChange = "msvisibilitychange";
} else if (typeof document.webkitHidden !== "undefined") {
    hidden = "webkitHidden";
    visibilityChange = "webkitvisibilitychange";
}

// Warn if the browser doesn't support addEventListener or the Page Visibility API
if (typeof document.addEventListener === "undefined" || hidden === undefined) {
    console.log("This demo requires a browser, such as Google Chrome or Firefox, that supports the Page Visibility API.");
} else {
    // Handle page visibility change
    document.addEventListener(visibilityChange, handleVisibilityChange, false);
}

//================= CONFIG =================
// Stream Audio
let bufferSize = 2048,
    AudioContext,
    context,
    processor,
    input,
    globalStream;

//vars
let audioElement = document.querySelector('audio'),
    finalWord = false,
    resultText = document.getElementById('ResultText'),
    removeLastWord = true,
    streamStreaming = false;

//audioStream constraints
const constraints = {
    audio: true,
    video: false
};

//================= RECORDING =================
function initRecording() {
    socket.emit('startGoogleCloudStream', ''); //init socket Google Speech Connection
    streamStreaming = true;
    AudioContext = window.AudioContext || window.webkitAudioContext;
    context = new AudioContext();
    processor = context.createScriptProcessor(bufferSize, 1, 1);
    processor.connect(context.destination);
    context.resume();
    var handleSuccess = function (stream) {
        globalStream = stream;
        input = context.createMediaStreamSource(stream);
        input.connect(processor);
        processor.onaudioprocess = function (e) {
            microphoneProcess(e);
        };
    };
    navigator.mediaDevices.getUserMedia(constraints)
        .then(handleSuccess);
}

function microphoneProcess(e) {
    var left = e.inputBuffer.getChannelData(0);
    var left16 = convertFloat32ToInt16(left);
    socket.emit('binaryData', left16);
}

//================= INTERFACE =================
var startButton = document.getElementById("startRecButton");
if (startButton) {
    startButton.addEventListener("click", startRecording);
}
var endButton = document.getElementById("stopRecButton");
if (endButton) {
    endButton.addEventListener("click", stopRecording);
    endButton.disabled = true;
}

function startRecording() {
    startButton.disabled = true;
    endButton.disabled = false;
    initRecording();
}

function stopRecording() {
    // waited for FinalWord
    startButton.disabled = true;
    endButton.disabled = true;
    streamStreaming = false;
    socket.emit('endGoogleCloudStream', '');
    let track = globalStream.getTracks()[0];
    track.stop();
    input.disconnect(processor);
    processor.disconnect(context.destination);
    context.close().then(function () {
        input = null;
        processor = null;
        context = null;
        AudioContext = null;
        startButton.disabled = false;
    });
}

//================= SOCKET IO =================
socket.on('connect', function (data) {
    console.log('client connected');
    socket.emit('join', 'Server Connected to Client');
});

socket.on('endmessages', function (data) {
    console.log(data);
    resultText.appendChild(document.createTextNode(data));
});

socket.on('speechData', function (data) {
    // console.log(data.results[0].alternatives[0].transcript);
    // console.log(data.results[0]);
    var dataFinal = undefined || data.results[0].isFinal;
    if (dataFinal === false) {
        // console.log(resultText.lastElementChild);
        if (removeLastWord) { resultText.lastElementChild.remove(); }
        removeLastWord = true;
        //add empty span
        let empty = document.createElement('span');
        resultText.appendChild(empty);
        let edit = data.results[0].alternatives[0].transcript;
        resultText.lastElementChild.appendChild(document.createTextNode(edit));
        resultText.lastElementChild.appendChild(document.createTextNode('\u00A0'));
    } else if (dataFinal === true) {
        resultText.lastElementChild.remove();
        //add empty span
        let empty = document.createElement('span');
        var br = document.createElement("br");
        resultText.appendChild(empty);
        let edit = data.results[0].alternatives[0].transcript;
        resultText.lastElementChild.appendChild(document.createTextNode(edit));
        resultText.lastElementChild.appendChild(document.createTextNode('\u00A0'));
        resultText.appendChild(br);
        console.log("Google Speech sent 'final' Sentence.");
        finalWord = true;
        removeLastWord = false;
    }
});

socket.on('end', function (data) {
    console.log(data);
});

function handleVisibilityChange() {
    if (document[hidden]) {
        console.log('handleVisibilityChange:hidden');
        if (streamStreaming) {
            console.log("end");
            socket.emit('leave', 'onbeforeunload stop');
            stopRecording();
        }
    } else {
        console.log('handleVisibilityChange:show');
        startButton.disabled = false;
        endButton.disabled = true;
    }
}

//================= SANTAS HELPERS =================
// sampleRateHertz 16000 //saved sound is awful
// Keeps every third Float32 sample (a crude downsample, e.g. 48 kHz -> 16 kHz)
// and scales each kept sample to a 16-bit integer.
function convertFloat32ToInt16(buffer) {
    let l = buffer.length;
    let buf = new Int16Array(l / 3);
    while (l--) {
        if (l % 3 == 0) {
            buf[l / 3] = buffer[l] * 0xFFFF;
        }
    }
    return buf.buffer;
}
This is the architecture of the project:
publicFolder
    assetsFolder
        cssFolder
        fontsFolder
        jsFolder
            client.1.js
            client.js
            socket.io.js
            socket.io.js.map
    index.html
serverFolder
    app.js //this is the server
src
    components
        App.js //the wrapper component
        ComponentA.js
        ComponentB.js
    App.css
    index.js
    resgisterServiceWorker.js
.babelrc
webpack.config.js
I think I've fixed it, just by changing the connection to the socket from this:
//connection to socket
const io = require('socket.io-client');
const socket = io.connect('http://localhost:1337');
to this:
//connection to socket
const socket = io('http://localhost:1337');
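That fits the error message: a top-level const cannot be declared twice in the same scope, and the folder listing shows both client.js and client.1.js in jsFolder, so the same declarations may be loaded twice (the page also ships its own socket.io.js, which provides the global io that the fix relies on). If the script can end up running twice, a defensive variant avoids the redeclaration entirely (a sketch; window.appSocket is a name made up here, not from the original code):

//connection to socket
// `var` tolerates redeclaration, and window.appSocket (a hypothetical global)
// ensures only one connection is ever created even if this runs twice.
var socket = window.appSocket || (window.appSocket = io('http://localhost:1337'));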
