I am trying to run something in the background while an action is performed.
This is the element that's being updated. It's linked to an object of a user defined class called spline. On dragging the point the spline object is updated and that in turn updates the curve that you see.
I am trying to write a function that runs asynchronously in the background whenever spline is updated but doesn't hinder anything else, just executes and finishes on its own time. This is the test code I've written.
// Effect intended to run "background" work whenever `spline` changes.
useEffect(() => {
async function asyncer() {
// Busy-wait: spins the CPU until `ms` milliseconds have elapsed.
// It runs on the main thread, so it blocks the event loop — this is
// why the spline cannot update until the loop finishes.
function wait(ms) {
var start = new Date().getTime();
var end = start;
while (end < start + ms) {
end = new Date().getTime();
}
}
// The Promise executor runs synchronously, so wait(1000) blocks right
// here for a full second before resolve() is ever reached.
const p = new Promise(function (resolve, reject) {
wait(1000);
resolve();
});
p.then(() => console.log("hello"));
}
asyncer();
// Re-run the effect whenever the spline object identity changes.
}, [spline]);
But this does hinder the spline from being updated in time. Here's what happens.
The spline doesn't update until the function has completed executing.
Very simply, javascript is single threaded, and this function
// Synchronous busy-wait: loops until `ms` milliseconds have passed.
// Because JavaScript is single-threaded, this monopolizes the event
// loop — nothing else (rendering, state updates) can run meanwhile.
function wait(ms) {
var start = new Date().getTime();
var end = start;
while (end < start + ms) {
end = new Date().getTime();
}
}
is blocking the event loop. This prevents JavaScript from executing anything else. What you can do is implement this as a promise instead,
// Non-blocking delay: yields control back to the event loop and
// resolves the returned promise after `ms` milliseconds.
function wait(ms) {
  return new Promise(function (resolve) {
    setTimeout(resolve, ms);
  });
}
Related
I've only been using JS and React for a short time, and am running into issues with waiting for a forEach loop to complete before continuing.
The function glitchLib below should pull an array of img sources from state, iterate through the elements of the array and "glitch" each image (the actual process of glitching is done with a javascript library). For each glitched image, I want to push a 2-elem array with the original source and glitched source into currentSaved[], and then pass the array of arrays in a callback.
// Glitches every image source from state and reports the results.
// NOTE(review): image.onload fires asynchronously, so the forEach
// completes (and `this.props.callback` runs) before any handler has
// pushed into `currentSaved` — the callback receives an empty array.
glitchLib() {
const currentSaved = [];
var array = this.state.originalFiles;
array.forEach(function(src) {
var originalImage = src;
const image = new Image();
image.src = src;
// Runs later, once the browser has finished loading the image.
image.onload = () => {
glitch()
.fromImage(image)
.toDataURL()
.then((dataURL) => {
const dataArray = [originalImage, dataURL];
currentSaved.push(dataArray);
});
};
});
// Executes synchronously — before any onload handler above has run.
this.props.callback(currentSaved);
}
If I wrap the callback in a setTimeout for ~10 seconds or so, the array is properly iterated through so there isn't any issue with the way the js library is performing the "glitching", which should just return a base64 image encoding. Without the setTimeout, an empty array is passed.
What is the proper way to wait for the array to be fully iterated through (or for that matter, is there any better way of doing this sort of thing)?
You can wait for the completion of a number of Promises using Promise.all():
// Stub standing in for the real glitch library: always resolves 'xyz'.
const glitch = () => Promise.resolve('xyz')

// Collects one promise per source, then hands the combined results to
// `callback` in a single array once every promise has fulfilled.
function glitchLib(callback) {
  const promises = []
  const array = ['abc', 'def']
  array.forEach(src => {
    const originalImage = src
    const image = new Image()
    image.src = src
    /*image.onload = */;(() => {
      // Map each glitch result to an [original, glitched] pair and
      // remember the pending promise instead of its eventual value.
      const pending = glitch()
        //.fromImage(image)
        //.toDataURL()
        .then(dataURL => [originalImage, dataURL])
      promises.push(pending)
    })()
  })
  // Fires only after every queued promise has resolved.
  Promise.all(promises).then(currentSaved => callback(currentSaved))
}

glitchLib(x => console.log(x))
I've been working on node.js for a short time and currently have the following problem: I would like to iterate through an array and send an http request for each element in the array. These requests should be executed every 5 seconds. In addition to this, this should be done max. 10 times.
That means every 5 seconds an http request should be sent for all elements in the array.
I have already tried a branched async for and for each loop, but I am not getting the desired result.
I know where the problem is with my code but can't find another solution.
It looks something like this:
// Set for loop
// Set for loop
// NOTE(review): this snippet is broken as pasted: `|||` should be
// `||`, and `}interval * i);` is missing a comma — it should read
// `}, interval * i);`. Also note that all eleven setTimeout callbacks
// are scheduled up-front in one synchronous pass of the for loop.
for (var i = 0; i <= 10; i++) {
setTimeout(function () {
// Iterate through every element within the array
sendItems.forEach(function (arrayItem) {
// Some code
request (.....)
// Exit condition
// Only inspects the status of the LAST element of sendItems.
if (sendItems[sendItems.length - 1].status === 'Failed'|||
sendItems[sendItems.length - 1].status ==='Successful') {
if (cbFlag === false) {
interval = 0;
resolve(sendItems);
}
}
});
}interval * i);
Assuming that by request you're referring to the Node.js module. Here is a possible solution using something unusual like a sleep :)
// Third-party HTTP client (npm "request" package).
const request = require('request');
// Promisified variant so each call can be awaited instead of using callbacks.
const _request = require('util').promisify(request);
let sendItems = [1,2,3];
// Non-blocking sleep: resolves after `ms` milliseconds without
// occupying the event loop.
let sleep = (ms) => {
return new Promise(function (resolve, reject) {
setTimeout(function () {
console.log(`waiting ${ms/1000} sec..`)
resolve();
}, ms);
})
}
// Self-invoking async loop: 10 rounds; within each round the items
// are requested strictly one after another, then a 5 s pause follows.
(async function loop() {
for (let i = 0; i < 10; i++) {
for (let id of sendItems) {
// `await` serializes the requests: each completes before the next starts.
let r = await _request('https://jsonplaceholder.typicode.com/posts/'+id, { json: true })
console.log(r.body.id);
}
await sleep(5000);
} console.log("ok that's it")
})();
Whenever I iterate with a forEach loop to make REST calls, the iteration finishes before even the first response arrives. Can you please help me with that?
var list = [1,2,3];
$scope.fun1 = function(list) {
forEach(list,function(value,key)) {
console.log("Start");
restCall(url,function(response) { //each rest request will tack 30sec.
console.log(response); // response = 'end'
});
};
};
getting output:
Start
Start
Start
end
end
end
required output:
Start
end
Start
end
Start
end
Your REST call needs to return a promise you can watch. Then you need to build a queue and register the calls synchronously: keep a variable and chain your single calls onto it. It could look something like this
var list = [1,2,3];
// Seed the chain with an already-resolved promise so the first
// .then() callback can run.
var queue = new Promise(resolve => resolve());
$scope.fun1 = function(list) {
// NOTE(review): same misplaced-paren syntax as the question's snippet.
forEach(list, function(value,key)) {
console.log("Start");
// Each call is chained onto the previous one: a request starts only
// after the prior request's response handler has finished.
queue = queue.then(() => {
return restCall(url,function(response) { //each rest request takes ~30 sec.
console.log(response); // response = 'end'
});
});
};
};
If you want to continue the queue even if one request fails, you need to add a .catch() to catch the error
I'm not sure what you want to achieve, but in order to get the required output I would suggest using recursion.
// Processes `list` strictly in sequence: each response callback kicks
// off the next call, so "start"/"end" pairs alternate in the log.
function callApi(list) {
  // Base case: stop once nothing is left to process.
  if (list.length === 0) {
    return;
  }
  console.log('start');
  restApi(url, function () {
    console.log('end');
    // Recurse on the remainder. splice(1) intentionally mutates the
    // array, handing the tail to the next level of recursion.
    callApi(list.splice(1));
  });
}
sorry for the title, I was trying to be specific but it's a bit confusing. Actually the piece of code should be the better way to show you what my problem is:
var i = 0
var jsonArray = []
var startTime = new Date().getTime()
// Outer interval: every 10 s, dump the accumulated JSON to a file and
// reset the accumulator.
// NOTE(review): nesting the polling interval inside this one spawns a
// NEW 1-second loop on every 10-second tick — the loops multiply.
var writeFile = setInterval(function () {
var currentTime = new Date().getTime()
var stopTime = 31 * 1000
var executeScript = setInterval(function () {
// the script will stop after some time
// NOTE(review): `currentTime` is captured once per outer tick, so
// this comparison does not advance within the inner loop.
if (currentTime - startTime > stopTime) { // time in ms: 60 * 60 * 1000 = 1 hour
process.exit()
}
// making request and fetching data
http.get(request, function (response) {
response.pipe(bl(function (err, data) {
if (err) return console.error(err)
response.setEncoding('utf8')
parseString(data, function (err, result) {
result = result.SoccerFeed.SoccerDocument[0].MatchData[0].TeamData
// creating object and filling it with received data
result = createJsObject(result)
// converting object in json format
var jsonObject = JSON.stringify(result, null, 4)
jsonArray.push(jsonObject)
// copying json into a new file without overwriting previous written data
})
}))
}).on('error', console.error)
}, 1 * 1000) // interval in ms: 90 * 1000 = 90 seconds / 1 min 30
fs.writeFile('API - Opta - ' + moment().format('YYYY - DD.MM (HH:mm:ss)') + '.txt', jsonArray, function(err) {
if (err) throw err
})
// Reassignment binds `jsonArray` to a brand-new array; the array
// already handed to fs.writeFile keeps its contents.
jsonArray = []
i++
}, 10 * 1000)
The problem is that the array I'm pushing in the file is keeping data from the old one, even when I clean it with jsonArray = []. I know I'm doing something wrong but I don't know what.
Thanks in advance for your help
So, at first I thought it was an asynchronous problem with fs.writeFile getting a blank copy of jsonArray. This is not the case due to the way that javascript handles jsonArray = []. When you make the call to fs.writeFile, you pass it jsonArray which is passed by "reference." When you set it to jsonArray = [] you actually create an entire new object. The object that was passed to fs.writeFile is no longer referenced by jsonArray and it's only scope is in the call to fs.writeFile thus it maintains the state that you passed in. If you had instead called jsonArray.length = 0 it would have always output an empty array to the file because that would have overwritten the actual array fs.writeFile. Check out this post to see that: How do I empty an array in JavaScript?
In short, that's not the problem.
But the actual problem is that http.get is receiving data many times after the file was written. Those things won't get written until your next loop. Your described sequence that things are always one behind makes perfect sense.
Your sequence is this inside your interval executeScript:
Check to make sure it's not quitting time
Start http.get
sometime in the future the response is received and added to jsonArray
Start fs.writeFile, contents of jsonArray locked in at this point in time as to what is going to be written to the file
sometime in future the file is written
Set jsonArray = []
I would propose you have two separate intervals that aren't nested like so:
var i = 0
var jsonArray = []
var startTime = new Date().getTime()
// Writer interval: every 10 s, flush whatever the 1-second poller has
// accumulated into a timestamped file, then reset the accumulator.
var writeFile = setInterval(function () {
fs.writeFile('API - Opta - ' + moment().format('YYYY - DD.MM (HH:mm:ss)') + '.txt', jsonArray, function(err) {
if (err) throw err
})
// Safe to rebind: the previous array is still referenced by the
// fs.writeFile call above.
jsonArray = []
i++
}, 10 * 1000)
// Poller interval: every second, fetch and parse the feed, pushing
// each result into jsonArray for the writer interval to pick up.
var executeScript = setInterval(function () {
var currentTime = new Date().getTime()
var stopTime = 31 * 1000
// the script will stop after some time
if (currentTime - startTime > stopTime) { // time in ms: 60 * 60 * 1000 = 1 hour
process.exit()
}
// making request and fetching data
http.get(request, function (response) {
response.pipe(bl(function (err, data) {
if (err) return console.error(err)
response.setEncoding('utf8')
parseString(data, function (err, result) {
result = result.SoccerFeed.SoccerDocument[0].MatchData[0].TeamData
// creating object and filling it with received data
result = createJsObject(result)
// converting object in json format
var jsonObject = JSON.stringify(result, null, 4)
jsonArray.push(jsonObject)
// copying json into a new file without overwriting previous written data
})
}))
}).on('error', console.error)
}, 1 * 1000) // interval in ms: 90 * 1000 = 90 seconds / 1 min 30
Now you have two intervals going at the same time. The 1 second loop on executing the script now has lots of data queued up for when the 10 second loop on writing the file kicks off. It writes the file and then immediately clears jsonArray for it to keep adding in subsequent 1 second loops of the executeScript interval.
Not sure if it helps, but place this line
var jsonArray = []
after this
var writeFile = setInterval(function () {
and you won't need to clear jsonArray.
so it will be
var writeFile = setInterval(function () {
var jsonArray = []
// ...
I have a pretty straight-forward problem where I'm :
Iterating through a series of dashboard "widgets" using _.each().
Calling a function to refresh the current widget, and returning a $q promise.
Now, my issue is that I would like each iteration to WAIT prior to continuing to the next iteration.
My first version was this, until I realized that I need to wait for updateWidget() to complete:
// First attempt: fire-and-forget. updateWidget's returned promise is
// ignored, so the iterations never wait for one another.
_.each(widgets, function (wid) {
if (wid.dataModelOptions.linkedParentWidget) {
updateWidget(wid, parentWidgetData);
}
});
My second version is this one, which returns a promise. But of course, I still have the problem where the iteration continues without waiting :
// Second attempt: attaches a .then(), but _.each still advances
// immediately — the loop does not pause until the promise settles.
_.each(widgets, function (wid) {
if (wid.dataModelOptions.linkedParentWidget) {
updateWidget(wid, parentWidgetData).then(function(data){
var i = 1;
});
}
});
and the called function which returns a deferred.promise object (then makes a service call for widget data) :
// Refreshes one widget: requests aggregated data, then rebuilds the
// widget's Kendo chart options from the response.
function updateWidget(widget, parWidData) {
var deferred = $q.defer();
// SAVE THIS WIDGET TO BE REFRESHED FOR THE then() SECTION BELOW
$rootScope.refreshingWidget = widget;
// .. SOME OTHER VAR INITIALIZATION HERE...
var url = gadgetDataService.prepareAggregationRequest(cubeVectors, aggrFunc, typeName, orderBy, numOrderBy, top, filterExpr, having, drillDown);
// NOTE(review): this `return` exits the function here, so the
// `return deferred.promise` below is unreachable. Callers receive
// the .then() chain's promise instead, which resolves to undefined
// because the callback returns nothing — the deferred is redundant
// (classic deferred anti-pattern).
return gadgetDataService.sendAggGetRequest(url).then(function (data) {
var data = data.data[0];
var widget = {};
if ($rootScope.refreshingWidget) {
widget = $rootScope.refreshingWidget;
}
// BUILD KENDO CHART OPTIONS
var chartOptions = chartsOptionsService.buildKendoChartOptions(data, widget);
// create neOptions object, then use jquery extend()
var newOptions = {};
$.extend(newOptions, widget.dataModelOptions, chartOptions);
widget.dataModelOptions = newOptions;
deferred.resolve(data);
});
return deferred.promise;
}
I would appreciate your ideas on how to "pause" on each iteration, and continue once the called function has completed.
thank you,
Bob
******* UPDATED ************
My latest version of the iteration code include $q.all() as follows :
// CREATE ARRAY OF PROMISES !!
// The updates run concurrently; $q.all only reports when every one
// of them has resolved (it rejects on the first failure).
var promises = [];
_.each(widgets, function (wid) {
if (wid.dataModelOptions.linkedParentWidget) {
promises.push(updateWidget(wid, parentWidgetData));
}
});
$q.all(promises)
.then(function () {
$timeout(function () {
// without a brief timeout, not all Kendo charts will properly refresh.
$rootScope.$broadcast('childWidgetsRefreshed');
}, 100);
});
By chaining promises
The easiest is the following:
// Build a sequential chain: each widget update begins only after the
// previous link of the chain has settled.
var queue = $q.when();
_.each(widgets, function (wid) {
  queue = queue.then(function () {
    // Widgets without a linked parent contribute nothing; returning
    // undefined lets the chain move on immediately.
    if (!wid.dataModelOptions.linkedParentWidget) {
      return;
    }
    return updateWidget(wid, parentWidgetData);
  });
});
queue.then(function () {
  // all completed sequentially
});
Note: at the end, queue will resolve with the return value of the last iteration
If you write a lot of async functions like this, it might be useful to wrap it into a utility function:
// Invokes `cbAsync` once per item, strictly one after another, by
// chaining every call onto the previous promise. The returned promise
// resolves with the value of the final iteration.
function eachAsync(collection, cbAsync) {
  var chain = $q.when();
  _.each(collection, function (element, position) {
    // Wrap the call so it is deferred until the chain reaches it.
    var runStep = function () {
      return cbAsync(element, position);
    };
    chain = chain.then(runStep);
  });
  return chain;
}
// ...
// Example usage: widgets are updated one at a time; the trailing
// .then fires only after the last update has settled.
eachAsync(widgets, function(wid) {
if (wid.dataModelOptions.linkedParentWidget) {
return updateWidget(wid, parentWidgetData);
}
}).then(function() {
// all widgets updated sequentially
// still resolved with the last iteration
});
These functions build a chain of promises in the "preprocessing" phase, so your callback is invoked sequentially. There are other ways to do it, some of them are more efficient and use less memory, but this solution is the simplest.
By delayed iteration
This method will hide the return value even of the last iteration, and will not build the full promise chain beforehands. The drawback is that, it can be only used on array like objects.
// Sequential iteration without pre-building the whole chain: each
// step schedules the next only after the current promise settles.
// Works on array-likes only (relies on .length and numeric indexing).
function eachAsync(array, cbAsync) {
var index = 0;
function next() {
var current = index++;
if (current < array.length) {
// $q.when(value, cb): run `next` once this step's result resolves.
return $q.when(cbAsync(array[current], current), next);
}
// else return undefined
}
// This will delay the first iteration as well, and will transform
// thrown synchronous errors of the first iteration to rejection.
return $q.when(null, next);
}
This will iterate over any iterable:
// Variant that walks any ES2015 iterable through its Symbol.iterator
// protocol instead of numeric indexing.
function eachAsync(iterable, cbAsync) {
var iterator = iterable[Symbol.iterator]();
function next() {
var iteration = iterator.next();
if (!iteration.done) {
// we do not know the index!
return $q.when(cbAsync(iteration.value), next);
} else {
// the .value of the last iteration treated as final
// return value
return iteration.value;
}
}
// This will delay the first iteration as well, and will transform
// thrown synchronous errors of the first iteration to rejection.
return $q.when(null, next);
}
Keep in mind that these methods will behave differently when the collection changes during iteration. The promise chaining methods basically build a snapshot of the collection at the moment it starts iteration (the individual values are stored in the closures of the chained callback functions), while the latter does not.
Instead of trying to resolve each promise in your _.each(), I would build out an array of promises in your _.each to get an array like:
promises = [gadgetDataService.sendAggGetRequest(url1), gadgetDataService.sendAggGetRequest(url2)....]
Then resolve them all at once, iterate through the results and set your models:
$q.all(promises).then(function(results){ // iterate through results here })