How to push an array to a file without keeping old data when looping in setInterval? - arrays

Sorry for the title, I was trying to be specific but it's a bit confusing. The piece of code below should show my problem better:
var i = 0
var jsonArray = []
var startTime = new Date().getTime()

var writeFile = setInterval(function () {
    var currentTime = new Date().getTime()
    var stopTime = 31 * 1000

    var executeScript = setInterval(function () {
        // the script will stop after some time
        if (currentTime - startTime > stopTime) { // time in ms: 60 * 60 * 1000 = 1 hour
            process.exit()
        }
        // making request and fetching data
        http.get(request, function (response) {
            response.pipe(bl(function (err, data) {
                if (err) return console.error(err)
                response.setEncoding('utf8')
                parseString(data, function (err, result) {
                    result = result.SoccerFeed.SoccerDocument[0].MatchData[0].TeamData
                    // creating object and filling it with received data
                    result = createJsObject(result)
                    // converting object in json format
                    var jsonObject = JSON.stringify(result, null, 4)
                    jsonArray.push(jsonObject)
                    // copying json into a new file without overwriting previous written data
                })
            }))
        }).on('error', console.error)
    }, 1 * 1000) // interval in ms: 90 * 1000 = 90 seconds / 1 min 30

    fs.writeFile('API - Opta - ' + moment().format('YYYY - DD.MM (HH:mm:ss)') + '.txt', jsonArray, function (err) {
        if (err) throw err
    })
    jsonArray = []
    i++
}, 10 * 1000)
The problem is that the array I'm writing to the file keeps data from the previous write, even though I clear it with jsonArray = []. I know I'm doing something wrong, but I don't know what.
Thanks in advance for your help

So, at first I thought it was an asynchronous problem, with fs.writeFile getting a blank copy of jsonArray. That is not the case, because of the way JavaScript handles jsonArray = []. When you call fs.writeFile, you pass it jsonArray, which is passed by "reference." When you then set jsonArray = [], you create an entirely new object; the object that was passed to fs.writeFile is no longer referenced by jsonArray, its only remaining reference is inside the fs.writeFile call, so it keeps the state it had when you passed it in. If you had instead called jsonArray.length = 0, it would have always written an empty array to the file, because that mutates the actual array fs.writeFile received. Check out this post for details: How do I empty an array in JavaScript?
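To make the difference concrete, here is a minimal standalone sketch (not the original script) where an array is handed to a function the same way jsonArray is handed to fs.writeFile:

function writeLater(data) {
    // "data" holds a reference to whatever array was passed in,
    // independent of what the outer variable points to afterwards.
    setTimeout(function () {
        console.log(data)
    }, 0)
}

var arr = [1, 2, 3]
writeLater(arr)
arr = []          // rebinds the variable; "data" still sees [1, 2, 3]

var arr2 = [1, 2, 3]
writeLater(arr2)
arr2.length = 0   // empties the object itself; "data" now sees []

The first call still logs [1, 2, 3] because reassigning arr only rebinds the variable, while the second logs [] because .length = 0 empties the one array object both names point to.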
In short, that's not the problem.
But the actual problem is that http.get keeps receiving data after the file has been written; those responses won't get written until your next loop. The sequence you describe, where everything is always one write behind, makes perfect sense.
Your sequence inside your executeScript interval is this:
Check to make sure it's not quitting time
Start http.get
Sometime in the future the response is received and added to jsonArray
Start fs.writeFile; the contents of jsonArray at this point are locked in as what will be written to the file
Sometime in the future the file is written
Set jsonArray = []
I would propose you have two separate intervals that aren't nested like so:
var i = 0
var jsonArray = []
var startTime = new Date().getTime()

var writeFile = setInterval(function () {
    fs.writeFile('API - Opta - ' + moment().format('YYYY - DD.MM (HH:mm:ss)') + '.txt', jsonArray, function (err) {
        if (err) throw err
    })
    jsonArray = []
    i++
}, 10 * 1000)

var executeScript = setInterval(function () {
    var currentTime = new Date().getTime()
    var stopTime = 31 * 1000
    // the script will stop after some time
    if (currentTime - startTime > stopTime) { // time in ms: 60 * 60 * 1000 = 1 hour
        process.exit()
    }
    // making request and fetching data
    http.get(request, function (response) {
        response.pipe(bl(function (err, data) {
            if (err) return console.error(err)
            response.setEncoding('utf8')
            parseString(data, function (err, result) {
                result = result.SoccerFeed.SoccerDocument[0].MatchData[0].TeamData
                // creating object and filling it with received data
                result = createJsObject(result)
                // converting object in json format
                var jsonObject = JSON.stringify(result, null, 4)
                jsonArray.push(jsonObject)
                // copying json into a new file without overwriting previous written data
            })
        }))
    }).on('error', console.error)
}, 1 * 1000) // interval in ms: 90 * 1000 = 90 seconds / 1 min 30
Now you have two intervals going at the same time. The 1-second executeScript loop has plenty of data queued up by the time the 10-second file-writing loop kicks off. That loop writes the file and then immediately clears jsonArray, which keeps collecting data on subsequent 1-second iterations of the executeScript interval.

Not sure if it helps, but place this line
var jsonArray = []
after this
var writeFile = setInterval(function () {
and you won't need to clear jsonArray.
So it will be:
var writeFile = setInterval(function () {
var jsonArray = []
// ...

Related

node.js Iterate through an array at an interval a certain number of times

I've been working with node.js for a short time and currently have the following problem: I would like to iterate through an array and send an HTTP request for each element in the array. These requests should be executed every 5 seconds, and this should be done at most 10 times.
That means every 5 seconds an HTTP request should be sent for every element in the array.
I have already tried a branched async for and forEach loop, but I am not getting the desired result.
I know where the problem is in my code but can't find another solution.
It looks something like this:
// Set for loop
for (var i = 0; i <= 10; i++) {
    setTimeout(function () {
        // Iterate through every element within the array
        sendItems.forEach(function (arrayItem) {
            // Some code
            request(.....)
            // Exit condition
            if (sendItems[sendItems.length - 1].status === 'Failed' ||
                sendItems[sendItems.length - 1].status === 'Successful') {
                if (cbFlag === false) {
                    interval = 0;
                    resolve(sendItems);
                }
            }
        });
    }, interval * i);
}
Assuming that by request you mean the Node.js request module, here is a possible solution using something a bit unusual, like a sleep :)
const request = require('request');
const _request = require('util').promisify(request);

let sendItems = [1, 2, 3];

let sleep = (ms) => {
    return new Promise(function (resolve, reject) {
        setTimeout(function () {
            console.log(`waiting ${ms / 1000} sec..`);
            resolve();
        }, ms);
    });
};

(async function loop() {
    for (let i = 0; i < 10; i++) {
        for (let id of sendItems) {
            let r = await _request('https://jsonplaceholder.typicode.com/posts/' + id, { json: true });
            console.log(r.body.id);
        }
        await sleep(5000);
    }
    console.log("ok that's it");
})();

How do you find all Users *not* in a Parse.com Query?

I have a "Logs" table that has a pointer to a User in the User table. I am trying to "Find all users without a log entry for the last N days". The following query, however, seems to return all users instead of the expected value.
Here is my current query:
var logsFrom = function (start, end) {
    // Query for all logs between start and end
    var Log = Parse.Object.extend("Log");
    var query = new Parse.Query(Log);
    query.greaterThanOrEqualTo("createdAt", start);
    query.lessThan("createdAt", end);
    return query;
};

var nonLogsFound = function (request, response) {
    Parse.Cloud.useMasterKey();
    // Midnight of the current day
    var end = new Date();
    end.setHours(0, 0, 0, 0);
    // Five days before that
    var time = (5 * 24 * 3600 * 1000);
    var start = new Date(end.getTime() - (time));
    var count = 0;
    var logQuery = logsFrom(start, end);
    var userQuery = new Parse.Query(Parse.User);
    userQuery.doesNotMatchKeyInQuery("objectId", "user", logQuery);
    userQuery.find({
        success: function (users) {
            // This returns *all* users.
            count = users.length;
            response.success("Query complete! (" + count + " results)");
        },
        error: function (error) {
            console.error(error);
            response.error(error);
        }
    });
};
Thanks
Your query is matching a pointer ('user') against a string ('objectId'). That is why it's returning all results.
Maybe try changing
userQuery.doesNotMatchKeyInQuery("objectId", "user", logQuery);
to
userQuery.doesNotMatchKeyInQuery("objectId", "user.objectId", logQuery);
I'm not sure whether it supports key paths, though; I haven't tested it.
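If key paths turn out not to be supported, a possible fallback (just a sketch using the standard Parse JS SDK methods find and notContainedIn, untested against this schema) is to fetch the matching logs first, collect the user ids from their "user" pointers, and exclude those ids from the user query:

var logQuery = logsFrom(start, end);
logQuery.limit(1000); // paginate if you expect more than 1000 logs

logQuery.find().then(function (logs) {
    // Pull the user ids out of the "user" pointers on the logs
    var userIds = logs.map(function (log) {
        return log.get("user").id;
    });
    var userQuery = new Parse.Query(Parse.User);
    userQuery.notContainedIn("objectId", userIds);
    return userQuery.find();
}).then(function (users) {
    response.success("Query complete! (" + users.length + " results)");
}, function (error) {
    response.error(error);
});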

How to execute promises "sync" and not in an async way

I'm calling getBubblesUserAccess, which returns JSON objects that are ordered in a special way. Over this result I want to run a forEach and fetch other messages, but I want those returned in the same order. I know the calls will run asynchronously, but there must be a way to force sequential execution. (The code below is my last attempt, adding a defer...)
Example
pseudo code - get my groups
{
"id":"016cd1fc-89a3-4e4a-9e6e-a102df1b03d9",
"parent":"53750396-7d26-41f3-913d-1b93276b9e09",
"name":"XX",
"createdBy":"c9c63080-2c5b-4e8e-a093-2cfcd628a9d0",
"hasWriteAccess":true,
"hasCreateAccess":false,
"hasDeleteAccess":false,
"hasAdminAccess":false,
"settingsBubbleId":"00000000-0000-0000-0000-000000000000"
},
{
"id":"016cd1fc-89a3-4e4a-9e6e-a102df1b03d9",
"parent":"53750396-7d26-41f3-913d-1b93276b9e09",
"name":"XX",
"createdBy":"c9c63080-2c5b-4e8e-a093-2cfcd628a9d0",
"hasWriteAccess":true,
"hasCreateAccess":false,
"hasDeleteAccess":false,
"hasAdminAccess":false,
"settingsBubbleId":"00000000-0000-0000-0000-000000000000"
}
From this result I want to iterate over those parent id strings and call another service, which responds with this:
pseudo code
For each group above, call another service with the parent id and get its result. This result will be added to a new JSON object.
"messages":[
{
"id":"f1d1aeda-d4e2-4563-85d5-d954c335b31c",
"text":"asd",
"sent":"2015-09-10T22:31:09.897+00:00",
"sender":"6b9e404b-ef37-4d07-9267-3e7b2579003b",
"senderName":"XXX XXXX"
},
{
"id":"a7ac0432-e945-440e-91ce-185170cbf3de",
"text":"asd",
"sent":"2015-09-10T22:28:24.383+00:00",
"sender":"c9c63080-2c5b-4e8e-a093-2cfcd628a9d0",
"senderName":"ZZZZZ ZZZZ"
},
My problem is that my second forEach runs async (as it should), and I want it to resolve back in the same order as the first JSON object...
My code:
var loadBubblesAccess = function () {
        if (vm.running && angular.isDefined(vm.running)) { return; }
        vm.running = true;
        vm.bubblesWithMessages = null;
        return BubbleFactory.getBubblesUserAccess().then(function (bubblesAccessTo) {
            return bubblesAccessTo;
        });
    },
    loadSubBubbles = function (bubblesAccessTo) {
        /**
         * Result from the chained method with all bubbles the user has access to.
         */
        var promiseArray = [];
        //var promiseArrayError = [];
        var i = 0;
        /**
         * Create a defer object so that we do not resolve before the forEach loop has run through everything.. async problems.
         */
        var deferred = $q.defer();
        angular.forEach(bubblesAccessTo, function (bubble) {
            $log.error(JSON.stringify(bubblesAccessTo));
            /**
             * Get 20 because that's the default, and caching and e-tags are tied to that number..
             */
            BubbleFactory.getBubbleMessages(bubble.id, 0, 20, false).then(function (data) {
                i++;
                if (data.messages.length > 0) {
                    promiseArray.push({ bubbleSortOrder: i, bubbleId: bubble.parent, bubbleName: bubble.name, bubbleMessagesId: bubble.id, bubbleMessages: smartTrim(data.messages[0].text, 400, ' ', ' ...'), bubbleMessagesSent: data.messages[0].sent });
                }
                else {
                    // console.log("YYYY::: " + bubble.parent);
                    promiseArray.push({ bubbleSortOrder: i, bubbleId: bubble.parent, bubbleName: bubble.name, bubbleMessagesId: bubble.id, bubbleMessages: 'Inget meddelande än..', bubbleMessagesSent: '' });
                }
            });
            /**
             * Check if we have gone through all bubbles - when finished we resolve the defer object.
             */
            if (i === bubblesAccessTo.length) {
                deferred.resolve(promiseArray);
            }
        });
        //$log.debug.log(promiseArray);
        vm.bubblesWithMessages = promiseArray;
        promiseArray.length = 0;
        vm.running = false;
    };
loadBubblesAccess().then(loadSubBubbles);
The $q service in AngularJS is described as "lightweight" because it only implements the functions 90% of people need. That keeps its code size small - at the expense of not being able to address requests like yours very easily.
If you have the option, try an alternative such as bluebird. Bluebird provides a reduce() function that can execute an array of promises serially, and return their results in the order they were requested. It makes this task straightforward because your result array will match your data array and you can match up the results very easily.
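For illustration, here is a minimal sketch of that approach using Bluebird's Promise.reduce, which walks the array one item at a time and keeps the results in input order. loadMessagesFor is a hypothetical stand-in for whatever call returns a promise per bubble (e.g. BubbleFactory.getBubbleMessages), and the pushed fields are only examples:

// Sketch only: Bluebird's reduce waits for each promise before moving on,
// so the accumulator ends up in the same order as bubblesAccessTo.
var P = require('bluebird'); // or however Bluebird is exposed in your build

P.reduce(bubblesAccessTo, function (results, bubble, index) {
    return loadMessagesFor(bubble).then(function (data) {
        results.push({ bubbleSortOrder: index, bubbleId: bubble.parent, data: data });
        return results;
    });
}, []).then(function (results) {
    // results is ordered exactly like bubblesAccessTo
    vm.bubblesWithMessages = results;
});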
If you do NOT have that option, there is a standard (if not exactly simple) technique with promises: build an array of the values you want to process, then call the processing function (which returns a promise) on the first value (shifted from the array). In the .finally() handler, call the processing function recursively with the next value until the array is empty (or an error occurs).
Pseudo-code for this:
var valuesToProcess = [1, 2, 3],
    results = [];

function processValue(val) {
    myProcessingFunction(val).then(function (result) {
        results.push(result);
    }).catch(function (e) {
        console.log('FAIL!', e);
    }).finally(function () {
        if (valuesToProcess.length > 0) {
            processValue(valuesToProcess.shift());
        } else {
            // All done - do something with results here
        }
    });
}

// Note: No error checking done, assumes we have work to do...
processValue(valuesToProcess.shift());
You'll need to adapt this to your use-case but it's a simple technique that guarantees serial operation and result-handling.

How to use Bluebird promisification with generators + parallel promises

Trying to fire off multiple requests to the Beats API using bluebird, as well as koa for generators.
After reading some documentation I figured the following would work
var request = require('co-request'),
    _ = require('lodash'),
    Promise = require('bluebird');

request = Promise.promisifyAll(request);

module.exports.getTracks = function *tracks() {
    var promises = [];
    var firstCall = yield makeAPICall('users/' + me + '/mymusic/tracks?limit=150');
    var total = firstCall.body.info.total;
    total -= 150;
    var tracks = firstCall.body.data;
    //Beats only allows a maximum of 150 tracks per call
    //If more tracks are needed then the remainder is called in sets of 150
    var offset = 150;
    while (total > 0) {
        promises.push(makeAPICall('users/' + me + '/mymusic/tracks?limit=150&offset=' + offset));
        offset += 150;
        total -= 150;
    }
    var responses = yield (Promise.all(promises));
};

function makeAPICall(query) {
    var authOptions = {
        url: 'https://partner.api.beatsmusic.com/v1/api/' + query,
        headers: { 'Authorization': 'Bearer ' + accessToken },
        json: true
    };
    return request.get(authOptions);
}
The makeAPICall method works as expected when used for firstCall, but for some reason when I start pushing the makeAPICall results into the array they never seem to execute. The responses variable yields just an array of functions instead of an array of responses from the Beats API. What do I need to change so that responses returns an array of objects similar to firstCall?
You're using co-request, which already converts callbacks to thunks, so there is no need to try to promisify things.
Here is a simplified runnable example, similar to your code, showing how to run api calls in parallel with Koa (which uses co under the hood).
When you yield an array, co will run any thunks/promises/generators etc in parallel.
var request = require('co-request'),
    co = require('co');

co(function *() {
    var results = yield getTracks();
    results.forEach(function (result) {
        console.log(result.body);
    });
}).then();

function * getTracks() {
    var queries = [];
    // swap out your queries here
    queries.push(makeAPICall('5185415ba171ea3a00704eed'));
    queries.push(makeAPICall('54fdc3c9862a3aab01dc95cf'));
    queries.push(makeAPICall('54fdc3da862a3aa501dc95d0'));
    // yielding an array returns an array of results
    var results = yield queries;
    return results;
}

function makeAPICall(query) {
    var options = {
        url: 'http://www.mocky.io/v2/' + query,
        json: true
    };
    return request.get(options);
}

synchronous for loop with promises

I have the following function that works, but not as I want it to, since each iteration needs the results of the iteration before it.
There are plenty of similar questions but I'm finding it hard to reduce the solutions down to a pattern.
How can I rewrite the following function so that each loop iteration "waits" for the one before it?
$scope.updateBarcode = function () {
    var itemcodes = $scope.itemcode.split(';');
    // doc is JSPDF document, fetch will add a new page to the supplied doc.
    var doc;
    //fetch will unshift the returned doc onto the supplied array.
    var result = [];
    for (var i = 0; i < itemcodes.length; i++) {
        var promise = $scope.fetch(doc, itemcodes[i], result);
        promise.then(function () {
            doc = result[0];
            if (i >= itemcodes.length) {
                doc.save(itemcodes[0] + '.pdf');
            }
        });
    }
}
You need to assign the new promise returned by then() back to your variable so that you build up a chain:
promise = promise.then(...);
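As a rough sketch of that pattern applied to the function above (assuming $scope.fetch returns a promise and $q is injected; untested):

$scope.updateBarcode = function () {
    var itemcodes = $scope.itemcode.split(';');
    var doc;
    var result = [];
    // Start with an already-resolved promise and keep re-assigning it,
    // so each fetch only starts after the previous one has finished.
    var promise = $q.when();
    itemcodes.forEach(function (itemcode) {
        promise = promise.then(function () {
            return $scope.fetch(doc, itemcode, result);
        }).then(function () {
            doc = result[0];
        });
    });
    // Runs once the whole chain has completed.
    promise.then(function () {
        doc.save(itemcodes[0] + '.pdf');
    });
};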
