TensorFlow.js prediction result doesn't change - angularjs

I trained my model with Google Teachable Machine (image project) and included the model in my Ionic Angular app. The model loads successfully, and I use the camera preview to predict the class shown in the camera image.
The picture displayed in the canvas updates properly, but the predict() method returns the same result on every call.
import * as tmImage from '@teachablemachine/image';
...
async startPrediction() {
    this.model = await tmImage.load(this.modelURL, this.metadataURL);
    this.maxPredictions = this.model.getTotalClasses();
    console.log('classes: ' + this.maxPredictions); // works properly
    requestAnimationFrame(() => {
        this.loop();
    });
}

async loop() {
    const imageAsBase64 = await this.cameraPreview.takeSnapshot({ quality: 60 });
    const canvas = document.getElementById('output') as HTMLImageElement;
    // image changes properly, I checked it with a canvas output
    canvas.src = 'data:image/jpeg;base64,' + imageAsBase64;
    const prediction = await this.model.predict(canvas);
    for (let i = 0; i < this.maxPredictions; i++) {
        const classPrediction =
            prediction[i].className + ': ' + prediction[i].probability.toFixed(2);
        // probability doesn't change, even if I hold the camera close over a trained image
    }
    requestAnimationFrame(() => {
        this.loop();
    });
}
The prediction result is, e.g., class1 = 0.34, class2 = 0.66, but it never changes.
I hope you can help me find my bug, thanks in advance!

The image has probably not finished loading by the time you call the prediction model. This has been discussed here and there:
function load(url) {
    return new Promise((resolve, reject) => {
        // attach the handler before setting src so the load event can't be missed
        canvas.onload = () => {
            resolve(canvas);
        };
        canvas.src = url;
    });
}
await load(base64Data)
// then the image can be used for prediction
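Putting that together with the loop from the question, a minimal sketch might look like this (img.decode() is a promise-based alternative to the onload pattern; the element and model names are taken from the question):
async loop() {
    const imageAsBase64 = await this.cameraPreview.takeSnapshot({ quality: 60 });
    const img = document.getElementById('output') as HTMLImageElement;
    img.src = 'data:image/jpeg;base64,' + imageAsBase64;
    // wait until the browser has actually decoded the new frame
    await img.decode();
    // only now does the prediction see the fresh snapshot
    const prediction = await this.model.predict(img);
    // ...evaluate prediction as before...
    requestAnimationFrame(() => this.loop());
}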

Related

Problem with real-time detection using Yolov5 & Django & React

I am trying to send the detection percentage shown in the video I am streaming (whenever there is a detection) to the client, and display it in a table at the same moment the detection happens. For now I only receive a small part of the data, and it arrives a few seconds late.
I am using Django to stream a video with YOLOv5 detection, and I display the video on the client using React.
My problem is that I wrote a function that gives me the detection percentage for my custom object when it shows up in the video, and I want to display this data beside the video.
I need to receive and display this data every time a detection happens, at the moment it happens, but right now it doesn't work: the data is sent and displayed only once or twice, and only after a few seconds. I'm sure I have a problem with the detection() function, and maybe with detection_percentage(),
but unfortunately I have not found a way to solve it.
views.py
def stream():
    cap = cv2.VideoCapture(source)
    model.iou = 0.5
    model.conf = 0.15
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            print("Error: failed to capture image")
            break
        results = model(frame, augment=False, size=640)
        for i in results.render():
            data = im.fromarray(i)
            data.save('demo.jpg')
        det = results.pred[0]
        annotator = Annotator(frame, line_width=2, pil=not ascii)
        im0 = annotator.result()
        image_bytes = cv2.imencode('.jpg', im0)[1].tobytes()
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + image_bytes + b'\r\n')
    cap.release()
    cv2.destroyAllWindows()

def detection():
    model.iou = 0.5
    model.conf = 0.15
    cap = cv2.VideoCapture(source)
    while True:
        ret, frame = cap.read()
        if not ret:
            print("Error: failed to capture image")
            break
        results = model(frame, augment=False, size=640)
        det = results.pred[0]
        if det is not None and len(det):
            xywhs = xyxy2xywh(det[:, 0:4])
            confs = det[:, 4]
            clss = det[:, 5]
            outputs = deepsort.update(xywhs.cpu(), confs.cpu(), clss.cpu(), frame)
            for j, (output, conf) in enumerate(zip(outputs, confs)):
                label = f'{conf:.2f}'
                print(label)
                return label
    cap.release()
    cv2.destroyAllWindows()

if __name__ == '__main__':
    p1 = Process(target=stream)
    p2 = Process(target=detection)
    p1.start()
    p2.start()
    p1.join()
    p2.join()

def video_feed(request):
    return StreamingHttpResponse(stream(), content_type='multipart/x-mixed-replace; boundary=frame')

def detection_percentage(request):
    return HttpResponse(detection())
Client side:
const Streamvideo = () => {
    const urlStream = "http://127.0.0.1:8000/video_feed";
    const urlDetaction = "http://127.0.0.1:8000/detection_percentage";
    const [data, setData] = useState("None");
    const [children, setChildren] = useState([]);
    const getPrecentOfDetection = async () => {
        try {
            const resp = await axios.get(urlDetaction);
            console.log(resp.data);
            setData(resp.data);
        } catch (err) {
            // Handle error here
            console.error(err);
        }
    };
    useEffect(() => {
        getPrecentOfDetection();
    }, []);
    useEffect(() => {
        setChildren((prev) => [
            ...prev,
            <div>
                <h6>Detection: {data}</h6>
                <h6>Current Time: {showTime}</h6>
            </div>,
        ]);
    }, [data, showTime]);
    return (
        <div>
            <img className={css.img} src={urlStream} alt="" />
            <div>
                <div className={css.cat}>{children}</div>
            </div>
        </div>
    );
};
export default Streamvideo;
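One thing stands out here: the client calls getPrecentOfDetection only once, in the mount-only useEffect, while the server's detection() view opens a fresh capture and returns after the first detection it finds. A minimal sketch of polling the endpoint instead, reusing the names from the question (the one-second interval is an arbitrary assumption):
useEffect(() => {
    // poll the detection endpoint once per second instead of fetching once on mount
    const id = setInterval(() => {
        getPrecentOfDetection();
    }, 1000);
    // stop polling when the component unmounts
    return () => clearInterval(id);
}, []);
Even with polling, each request makes detection() reopen the video source and scan until its first hit, so the server side likely needs rework too, e.g. having stream() store the latest percentage somewhere detection_percentage() can read it.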

Adding tracks or creating a new stream with MediaRecorder ends up in a corrupted file

This is the last post answering where my goal in developing my project lies: RecordRTC with sending video chunks to server and record as webm or mp4 on server side. After recording the screen-sharing stream, I decided to move forward and add an audio track from my media to the screen sharing. There are two ways for me to do it: using the addTrack function, or creating a new stream that contains the video from the screen sharing and the audio from my media. However, both of them give me the same error as in the aforementioned link: a corrupted video.
FYI: Here is the link for anyone who wants to read more about MediaRecorder: https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder
P/S: If you want to encourage me to switch back to WebRTC, I would appreciate it if you could help me solve the issue from the aforementioned link: the file ends up corrupted when using WebRTC.
Here is the code from my backend and frontend:
Client code:
startMedia = () => {
    this.setState({ mediaState: "pending" });
    navigator.mediaDevices.getDisplayMedia({ video: true }).then(async (screenSharingStream) => {
        console.log(MediaRecorder.isTypeSupported('video/webm; codecs=vp8,opus'));
        const socketIO = io.connect(baseURL, { query: { candidateID: this.candidateID, roundTest: this.roundTest } });
        const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true }).catch(e => { throw e; });
        const mediaReCorderoptions = {
            videoBitsPerSecond: 128000,
            audioBitsPerSecond: 128000,
            mimeType: 'video/webm; codecs=vp8,opus'
        };
        const [videoTrack] = screenSharingStream.getVideoTracks();
        const [audioTrack] = mediaStream.getAudioTracks();
        if (audioTrack && videoTrack)
            videoTrack.addTrack(audioTrack);
        const stream = new MediaStream([videoTrack, audioTrack]);
        this.socketRef.current = socketIO;
        this.mediaStream = mediaStream;
        this.screenSharingStream = stream;
        this.candidateVideoRef.current.srcObject = this.mediaStream;
        this.mediaRecorder = new MediaRecorder(this.screenSharingStream, mediaReCorderoptions);
        this.mediaRecorder.ondataavailable = function (event) {
            if (event && event.data.size > 0) {
                const reader = new FileReader();
                reader.onload = function () {
                    const dataURL = reader.result;
                    console.log('still running');
                    const base64EncodedData = dataURL.split(',')[1];
                    //console.log(buffer)
                    socketIO.emit('SEND BLOB', base64EncodedData);
                };
                reader.readAsDataURL(event.data);
            }
        };
        this.mediaRecorder.start(1000);
        this.setState({ mediaState: this.mediaRecorder.state });
    }).catch(err => {
        console.log(err.name);
        switch (err.name) {
            case 'NotAllowedError':
                message.error('Candidate does not allow!!');
                this.setState({ mediaState: "Aborting" });
                break;
            default:
                message.error('System Error. Please contact us!');
                this.setState({ mediaState: "Aborting" });
                break;
        }
    });
};

stopMedia = () => {
    if (this.mediaStream) {
        this.mediaStream.getTracks().forEach((track) => {
            if (track.readyState === 'live') {
                track.stop();
                this.candidateVideoRef.current.style.display = 'none';
            }
        });
    }
    if (this.screenSharingStream) {
        this.mediaRecorder.stop();
        this.setState({ mediaState: this.mediaRecorder.state });
    }
};
Server code:
socket.on("SEND BLOB",chunk=>{
try {
//if (chunk instanceof Buffer){
const fileExtension = '.webm'
const dataBuffer = new Buffer(chunk, 'base64');
const fileStream = fs.createWriteStream(path.join(__dirname,'./videos/candidate/',candidateID + '-' + roundTest + fileExtension), {flags: 'a'});
fileStream.write(dataBuffer);
}
catch(e){
console.log(e)
}
})
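As a side note on the two approaches the question mentions: addTrack is a method of MediaStream, not of MediaStreamTrack, so videoTrack.addTrack(audioTrack) will throw. A minimal sketch of combining the screen video with the microphone audio into one stream for the recorder, assuming the same two source streams as in the question:
// combine the screen-share video track with the microphone audio track
const combined = new MediaStream([
    ...screenSharingStream.getVideoTracks(),
    ...mediaStream.getAudioTracks(),
]);
const recorder = new MediaRecorder(combined, {
    mimeType: 'video/webm; codecs=vp8,opus',
});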

Using a separate AudioContext / scriptProcessor Node in Wavesurfer

I'm attempting to use a separate context/script processor from Wavesurfer's default so I can manipulate the pitch of the audio independently of the playback rate. When I pass the context/script processor as parameters and play back the audio, I don't get any sound.
My Waveform component:
const playbackEngine = new PlaybackEngine({
    emitter: emitter,
    pitch: pitch,
});

const Waveform = WaveSurfer.create({
    audioContext: playbackEngine.context,
    audioScriptProcessor: playbackEngine.scriptProcessor,
    barWidth: 1,
    cursorWidth: 1,
    pixelRatio: 1,
    container: '#audio-spectrum',
    progressColor: '#03a9f4',
    height: 100,
    normalize: true,
    responsive: true,
    waveColor: '#ccc',
    cursorColor: '#4a74a5'
});
// called in componentDidMount()
function loadMediaUrl(url) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    // Decode asynchronously
    request.onload = async function () {
        let buffer = request.response;
        // sets arrayBuffer for Playback Engine
        const audioBuff = await playbackEngine.decodeAudioData(buffer, (error) => {
            console.error(`Error decoding audio:`, error);
        });
        // sets audioBuffer for Wavesurfer to render the waveform
        // (where I believe the problem begins)
        Waveform.loadDecodedBuffer(audioBuff);
        // sets audioBuffer for Playback Engine to play back audio
        playbackEngine.setBuffer(audioBuff);
    };
    request.send();
}
Playback.js
const { SimpleFilter, SoundTouch } = require('./soundtouch');

const BUFFER_SIZE = 4096;

class PlaybackEngine {
    constructor({ emitter, pitch }) {
        this.emitter = emitter;
        this.context = new (window.AudioContext || window.webkitAudioContext);
        this.scriptProcessor = this.context.createScriptProcessor(BUFFER_SIZE, 2, 2);
        this.scriptProcessor.onaudioprocess = e => {
            const l = e.outputBuffer.getChannelData(0);
            const r = e.outputBuffer.getChannelData(1);
            const framesExtracted = this.simpleFilter.extract(this.samples, BUFFER_SIZE);
            if (framesExtracted === 0) {
                this.emitter.emit('stop');
            }
            for (let i = 0; i < framesExtracted; i++) {
                l[i] = this.samples[i * 2];
                r[i] = this.samples[i * 2 + 1];
            }
        };
        this.soundTouch = new SoundTouch();
        this.soundTouch.pitch = pitch;
        this.duration = undefined;
    }

    get pitch() {
        return this.soundTouch.pitch;
    }
    set pitch(pitch) {
        this.soundTouch.pitch = pitch;
    }

    decodeAudioData(data) {
        return this.context.decodeAudioData(data);
    }

    setBuffer(buffer) {
        const bufferSource = this.context.createBufferSource();
        bufferSource.buffer = buffer;
        this.samples = new Float32Array(BUFFER_SIZE * 2);
        this.source = {
            extract: (target, numFrames, position) => {
                this.emitter.emit('time', (position / this.context.sampleRate));
                const l = buffer.getChannelData(0);
                const r = buffer.getChannelData(1);
                for (let i = 0; i < numFrames; i++) {
                    target[i * 2] = l[i + position];
                    target[i * 2 + 1] = r[i + position];
                }
                return Math.min(numFrames, l.length - position);
            },
        };
        this.simpleFilter = new SimpleFilter(this.source, this.soundTouch);
        this.duration = buffer.duration;
        this.emitter.emit('duration', buffer.duration);
    }

    play() {
        this.scriptProcessor.connect(this.context.destination);
    }

    pause() {
        this.scriptProcessor.disconnect(this.context.destination);
    }

    seekPercent(percent) {
        if (this.simpleFilter !== undefined) {
            this.simpleFilter.sourcePosition = Math.round(
                percent / 100 * this.duration * this.context.sampleRate
            );
        }
    }
}

export default PlaybackEngine;
In this setup, Waveform.play() produces playback from the wavesurfer instance, but I cannot manipulate the pitch. Similarly, with playbackEngine.play() I can manipulate the pitch but lose all Wavesurfer functionality.
Though I'm pretty sure the problem stems from Wavesurfer and my playback engine using two separate AudioBuffers, I need to set up the buffer in my playback context as well as render the waveform with wavesurfer.
I'd like to know if anyone can confirm how to use the playback engine's context, script processor, and AudioBuffer to control the Wavesurfer instance (i.e., have Waveform.play() play audio from the playback engine and update the Wavesurfer UI).
All help is appreciated.
So I ended up manually removing
audioScriptProcessor: playbackEngine.scriptProcessor,
from the Wavesurfer initialization, then attaching playbackEngine's script processor to the destination node manually. I had a previous attempt set up like this and heard annoying popping sounds during playback. What I thought were sample/buffer errors was actually coming from an EventEmitter instance I had constantly broadcasting time between the files. Removing that solved my noise issue (ツ)
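A minimal sketch of that final setup, assuming the PlaybackEngine from the question (its play() and pause() above already connect and disconnect the script processor from context.destination):
// Wavesurfer renders the waveform but no longer owns the script processor
const Waveform = WaveSurfer.create({
    audioContext: playbackEngine.context,
    container: '#audio-spectrum',
    // ...same visual options as before, minus audioScriptProcessor...
});

// audible playback is driven by the engine instead of Wavesurfer
playbackEngine.play();   // scriptProcessor.connect(context.destination)
playbackEngine.pause();  // scriptProcessor.disconnect(context.destination)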

How to Upload Multiple Image Files in a Single Call in Reactjs

Other than the workaround of calling fetch multiple times for multiple image file uploads (looping through the files), how can the frontend upload multiple image files with just one fetch/upload call? Could someone provide a simple example? Just like we do on Facebook.
Thanks in advance!
Update: I am done with the looping logic on the front end. Now, since there is a loader on every image being uploaded, the uploaded percentage is calculated for all images as a single value. How can I split this value out for each image separately?
Looping Logic
for (let i = 0; i < e.target.files.length; i++) {
    let reader = new FileReader();
    let file = e.target.files[i];
    var self = this;
    reader.onloadstart = () => {
        self.setState({ ImageUploader: true });
    };
    reader.onloadend = () => {
        var data = reader.result;
        if (!file.type.includes('image')) {
            alert('PLEASE CHOOSE AN IMAGE BRAH!');
        } else if (file.size / (1024 * 1024) > 5) {
            alert('PLEASE CHOOSE A SMALLER IMAGE');
        } else {
            var url = 'https://api......';
            var ifd = new FormData();
            ifd.append('file', file);
            axios({
                url: url,
                method: 'put',
                onUploadProgress: function (progressEvent) {
                    var percentCompleted = Math.round((progressEvent.loaded * 100) / progressEvent.total);
                    self.setState({ Completed: percentCompleted });
                },
                withCredentials: true,
                data: ifd
            }).then((res) => {
                this.setState({ ImageUploader: false });
                this.setState({
                    image_id: this.state.image_id.concat(res.data.reason.image_id)
                });
            });
            this.setState({
                file: file,
                imagePreviewUrl: this.state.imagePreviewUrl.concat(reader.result),
                noImage: false,
                ImageChoosen: true
            });
        }
    };
    reader.readAsDataURL(file);
}
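Regarding the update: every upload writes its progress into the single Completed state key, so the last writer wins. One way to split it is to key progress by the loop index, which each onUploadProgress closure already captures because i is let-scoped. A rough sketch under that assumption (progressByFile is a hypothetical state field, not part of the original code):
onUploadProgress: function (progressEvent) {
    var percentCompleted = Math.round((progressEvent.loaded * 100) / progressEvent.total);
    // store one progress value per file, keyed by the loop index i
    self.setState(prev => ({
        progressByFile: { ...prev.progressByFile, [i]: percentCompleted }
    }));
}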

amCharts not displaying when the page loads in AngularJS

I am using amCharts to display some data, calling it from my init function. The following is my code:
export default class ACtrl {
    constructor($scope, $http, $state) {
        var sampleCharts = function () {
            let sampleChart;
            let sampleBarGraph;
            let sampleLine;
            const writeAlarmsample = data => {
                const {
                    total,
                    users
                } = data;
                // Set cumulative percentage
                let runningTotal = 0;
                users.forEach(user => {
                    runningTotal += user.assetAvailability;
                    user.cumulativePercentage = runningTotal / total * 100;
                });
                sampleChart.dataProvider = users;
                sampleChart.write('userAvailability');
                sampleChart.validateData();
            };
            function handleClick(event) {
                $state.go("app.userdetails", { userID: event.item.category });
            }
            // Alarm sample
            AmCharts.ready(() => {
                sampleChart = new AmCharts.AmSerialChart();
                sampleChart.categoryField = "assetId";
                // add click listener
                sampleChart.addListener("clickGraphItem", handleClick);
                var yAxis = new AmCharts.ValueAxis();
                yAxis.position = "left";
                sampleChart.addValueAxis(yAxis);
                var yAxis2 = new AmCharts.ValueAxis();
                yAxis2.position = "right";
                sampleChart.addValueAxis(yAxis2);
                sampleBarGraph = new AmCharts.AmGraph();
                sampleBarGraph.valueField = "userAvailability";
                sampleBarGraph.type = "column";
                sampleBarGraph.fillAlphas = 1;
                sampleBarGraph.lineColor = "#f0ab00";
                sampleBarGraph.valueAxis = yAxis;
                sampleChart.addGraph(sampleBarGraph);
                sampleLine = new AmCharts.AmGraph();
                sampleLine.valueField = "cumulativePercentage";
                sampleLine.type = "line";
                sampleLine.lineColor = "#cb0044";
                sampleLine.valueAxis = yAxis2;
                sampleChart.addGraph(sampleLine);
                sampleChart.write('userAvailability');
                $http.get(constants.LOCAL_HOST + "/dashboard/users")
                    .then(response => writeAlarmsample(response.data));
            });
        };
        $scope.init = function () {
            sampleCharts();
        };
    }
}
The charts load fine when I hit the refresh button, but they are not loaded when the page loads for the first time. I also have a refresh button that calls the function to load the charts, but even that does not load them. If I click on one of the chart items it takes me to a details page, and when I come back the charts do not load; I have to hit browser refresh again. Can anyone let me know the reason for this issue and how I can fix it?
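A likely culprit, offered as an assumption: in amCharts v3, AmCharts.ready() defers its callback to the page's load event, which fires on a full browser refresh but not on an in-app route change, so on SPA navigation the chart-building code never runs. A minimal sketch of building the chart without that wrapper, reusing the code from the question:
var sampleCharts = function () {
    // build the chart immediately instead of inside AmCharts.ready()
    sampleChart = new AmCharts.AmSerialChart();
    sampleChart.categoryField = "assetId";
    sampleChart.addListener("clickGraphItem", handleClick);
    // ...same axes and graphs as above...
    sampleChart.write('userAvailability');
    $http.get(constants.LOCAL_HOST + "/dashboard/users")
        .then(response => writeAlarmsample(response.data));
};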
