Dynamically nested $resource Promises with ordered $q.all Promises - angularjs

2nd UPDATE
We are implementing a BulkEdit feature which sends asynchronous CRUD requests to a backend.
So what I need here is a dynamically created set of nested promises.
In an abstract version the data array could look like:
var objArr = [
    {
        name: 'A',
        subs: [
            { id: 1, _action: 'create' },
            { id: 2, _action: 'create' },
            { id: 3, _action: 'delete' }
        ]
    },
    {
        name: 'B',
        subs: [
            { id: 4, _action: 'create' },
            { id: 5, _action: 'put' }
        ]
    },
    {
        name: 'C',
        subs: []
    }
];
Let me illustrate how the requests should be sent for this data, following the order given by '_action':
Get a transaction ID (see below).
As soon as the transaction ID is there, start sending requests for every Object in the Array, following these rules:
Per Object, send all 'delete' requests at once, if there are any.
After that, or if there weren't any 'delete' requests, send all 'put' requests, if there are any.
After 'delete' and/or 'put', send all 'create' requests, if there are any.
As soon as all requests for an Object are done, do something per Object.
As soon as all Objects are done, close the Transaction.
How is it possible to create this dynamic nested/non-nested promise chain?
UPDATED CODE now contains the Promise creation
When calling the function below, first a TransactionService gets a transaction id, which is required to be sent with each request. When everything is successful, the Transaction will be closed.
My current issue is that the promises are not resolved in the correct order (while the OPTIONS preflight requests seem to be) and that this example creates promises even when they are not required (e.g. for Object 'C' in the example above).
function startIt(objArr) {
    TransactionService.getTransaction().then(function (transaction) {
        var promiseArray = MyService.submit(objArr, transaction.id);
        $q.all(promiseArray).then(function () {
            Transactions.closeTransaction(transaction.id, function () {}).then(function () {
            });
        });
    });
}
This is the function for 'submit':
function submit(objArr, transactionId) {
    var promises = objArr.map(function (obj) {
        return submitWithTransId(transactionId, obj)
            .then(function (response) {
                // Object done
            });
    });
    return promises;
}
And this function is actually creating the Promises:
function submitWithTransId(transactionId, obj) {
    var promisesDelete = [];
    var promisesUpdate = [];
    var promisesCreate = [];
    angular.forEach(obj['subs'], function (sub) {
        switch (sub._action) {
            case 'delete':
                promisesDelete.push(createPromise(sub, transactionId));
                break;
            case 'put':
                promisesUpdate.push(createPromise(sub, transactionId));
                break;
            case 'create':
                promisesCreate.push(createPromise(sub, transactionId));
                break;
        }
    });
    var chainedPromises = $q.all(promisesDelete).then(function (deleteResponse) {
        return $q.all(promisesUpdate).then(function (updateResponse) {
            return $q.all(promisesCreate).then(function (createResponse) {
            });
        });
    });
    return chainedPromises;
}
And this is my createPromise function:
/** only simplified handling create case **/
function createPromise(sub, bulktransactionId) {
    var queryParams = {};
    if (bulktransactionId !== undefined && bulktransactionId !== null) {
        queryParams.transaction_id = bulktransactionId;
    }
    var promise = MyResourceService.create(queryParams, sub).$promise;
    promise.then(function (newSub) {
        // do something with the new/updated/deleted sub, especially update the view model
    });
    return promise;
}
/** MyResourceService **/
return $resource(ENV.apiEndpoint + 'api/v1/subs/:_id',
    {
        _id: '@id'
    }, {
        create: {
            method: 'POST'
        }
    }
);

Have a look at the following solution. The objective is to give you the overall structure; you will have to adapt it to your use case.
var objArr = [{
    name: 'A',
    subs: [
        { id: 1, _action: 'create' },
        { id: 2, _action: 'create' },
        { id: 3, _action: 'delete' }
    ]
}, {
    name: 'B',
    subs: [
        { id: 4, _action: 'create' },
        { id: 5, _action: 'put' }
    ]
}, {
    name: 'C',
    subs: []
}];
var promises = objArr.map(function(obj) {
    return firstLevelPromise(obj)
        .then(function(response) {
            console.log(response); // promise for each object
            return response;
        });
});

$q.all(promises)
    .then(function(response) {
        console.log(response); // completion - close transaction
    });

function firstLevelPromise(obj) {
    var deletePromises = [];
    var putPromises = [];
    var insertPromises = [];
    obj.subs.forEach(function(sub) { // preparing promise arrays for delete, put and insert
        if (sub._action === "delete") {
            deletePromises.push(deletePromise(sub));
        } else if (sub._action === "put") {
            putPromises.push(putPromise(sub));
        } else {
            insertPromises.push(insertPromise(sub));
        }
    });
    return $q.all(deletePromises) // executing delete promises
        .then(function(deleteResponse) {
            console.log("deleteExecuted: " + obj.name);
            return $q.all(putPromises) // on completion of delete, execute put promises
                .then(function(putResponse) {
                    console.log("putExecuted: " + obj.name);
                    return $q.all(insertPromises) // on completion of put, execute insert promises
                        .then(function(insertResponse) {
                            console.log("insertExecuted: " + obj.name);
                            return "object promise completed: " + obj.name; // on completion, return
                        });
                });
        });
}

function deletePromise(task) {
    return $q.resolve(task); // write your delete code here
}

function putPromise(task) {
    return $q.resolve(task); // write your put code here
}

function insertPromise(task) {
    return $q.resolve(task); // write your insert code here
}
Please note, the above code does the following:
It prepares a collection of promises, with one promise for each object in objArr.
Each object's promise is itself a chain: delete promises, followed by put promises, and finally insert promises. This ensures that for each object the delete tasks run first, then on their completion the put tasks, and then on their completion the insert tasks.
Here is a plunker and documentation for $q
UPDATE
The problem is with the createPromise function only. Update your code to the following. The problem was that you were calling then before returning, so it is likely that you were effectively returning an already resolved promise, like $q.resolve(). In that case it is possible that a create request gets resolved before a delete or a put, and a put call gets resolved before a delete. Hence, you should return the promise from here and perform the post-action work in the $q.all block.
function createPromise(sub, bulktransactionId) {
    var queryParams = {};
    if (bulktransactionId !== undefined && bulktransactionId !== null) {
        queryParams.transaction_id = bulktransactionId;
    }
    return MyResourceService.create(queryParams, sub).$promise;
}
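The per-sub work that used to live in the removed then can move into the caller's $q.all block instead. A minimal sketch (updateViewModel is a hypothetical helper standing in for whatever you previously did with newSub):
$q.all(promisesCreate).then(function (createdSubs) {
    createdSubs.forEach(function (newSub) {
        updateViewModel(newSub); // hypothetical helper: update the view model here instead of inside createPromise
    });
});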

Try this - the trick is to accumulate the promise (that's what I'm using the reduce for). Hope it helps.
// reversed the order => delete actions first; otherwise extra ordering logic may be needed
var objArr = [
    { name: 'A',
      subs: [
          { id: null,
            _action: 'delete'
          },
          { id: 2,
            _action: 'create'
          }
      ]
    },
    { name: 'B',
      subs: [
          { id: 3,
            _action: 'create'
          }
      ]
    }
];
Promise.all(objArr.map(obj => {
    return obj.subs.reduce((accumulatedPromisedSub, sub) => {
        return accumulatedPromisedSub.then(_ => yourRequestCallHere(sub));
    },
    Promise.resolve(true) // you could also do your delete here if you like
    );
}));

// OR going sort order agnostic:
Promise.all(objArr.map(obj => {
    const deleteAction = obj.subs.find(sub => sub._action === 'delete');
    return obj.subs.reduce((accumulatedPromisedSub, sub) => {
        if (sub._action === 'delete') return accumulatedPromisedSub;
        return accumulatedPromisedSub.then(_ => yourRequestCallHere(sub));
    },
    deleteAction ? yourRequestCallHere(deleteAction) : Promise.resolve(true)
    );
}));
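If you also need the transaction bookkeeping from the question, the same shape can be wrapped like this (a sketch only; TransactionService and Transactions are the services from the question, and yourRequestCallHere is still a placeholder, here assumed to accept the transaction id as a second argument):
TransactionService.getTransaction().then(transaction => {
    return Promise.all(objArr.map(obj => {
        return obj.subs.reduce(
            (accumulated, sub) => accumulated.then(_ => yourRequestCallHere(sub, transaction.id)),
            Promise.resolve(true)
        );
    })).then(_ => Transactions.closeTransaction(transaction.id, function () {}));
});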

I am so happy and thankful that I found it.
To my understanding I had two main issues in my code.
As $resource does not provide a (please forgive me, pros!) real $promise, even when I use something like .get().$promise, I had to
use bind to map the createPromise function onto my arrays, and
do the mapping only directly before returning the $q.all promise.
In any other case $resource seemed to immediately fill its promise with an empty object, and $q.all no longer worked because the promises looked as if they were already resolved.
Special thanks to @nikhil, who supported me in untangling this ugly, complex thing and who earned the bounty.
This is the working snippet:
function submitWithTransId(transactionId, obj) {
    var arrayDelete = [];
    var arrayUpdate = [];
    var arrayCreate = [];
    angular.forEach(obj['subs'], function (sub) {
        switch (sub._action) {
            case 'delete':
                arrayDelete.push(sub);
                break;
            case 'update':
                arrayUpdate.push(sub);
                break;
            case 'create':
                arrayCreate.push(sub);
                break;
        }
    });
    var promisesDelete = flattenArray(arrayDelete.map(createPromise.bind(undefined, obj, transactionId)));
    return $q.all(promisesDelete).then(function (deleteResponse) {
        console.log('Delete Promises ' + obj.name + ' resolved');
        var promisesUpdate = flattenArray(arrayUpdate.map(createPromise.bind(undefined, obj, transactionId)));
        return $q.all(promisesUpdate).then(function (updateResponse) {
            var promisesCreate = flattenArray(arrayCreate.map(createPromise.bind(undefined, obj, transactionId)));
            console.log('Update Promises ' + obj.name + ' resolved');
            return $q.all(promisesCreate).then(function (createResponse) {
                console.log('Create Promises ' + obj.name + ' resolved');
            });
        });
    }).catch(function (error) {
        console.log('Caught an error: ');
        console.log(error);
    });
}
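The snippet above relies on a flattenArray helper that is not shown; a minimal version could look like the following (an assumption about its behaviour, on the basis that createPromise may return either a single promise or an array of promises):
function flattenArray(arr) {
    return arr.reduce(function (flat, item) {
        return flat.concat(angular.isArray(item) ? item : [item]);
    }, []);
}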

Related

angular chaining arrays of promises

I am building a website over a database of music tracks. The database is as follows:
the music table contains musicid and title,
the musicrights table contains musicid and memberid,
the members table contains memberid and memberinfo.
I'm trying to build an array of objects in my database service, in which each entry represents a track together with its rightholders (each holding rights information about one rightholder but not his name) and their member info (which contains the name etc.). The backend is sailsjs and the code is as follows:
angular.module("myapp").service("database", ["$q", "$http", function($q, $http) {
var database = {};
function getHolderMember(rightHolder) {
return ($http.get("/api/members?where=" + JSON.stringify({
memberid: rightHolder.memberid
})).then(function (res) {
rightHolder.member = res.data[0];
return (rightHolder);
}));
}
function getRightHolders(doc) {
return ($http.get("/api/musicrights?where=" + JSON.stringify({
musicid: doc.musicid
})).then(function(res) {
// array of promises :
// each rightholder of a document has to solve member info
var rightHolders = [];
for (var i in res.data) {
var rightHolder = {
member: res.data[i].memberid,
type: res.data[i].membertype,
rights: res.data[i].memberrights
};
rightHolders.push(getHolderMember(rightHolder));
}
return ($q.all(rightHolders));
}).then(function(rightHolders) {
// expected array of one or two rightholders,
// enriched with member information
// actually returns array of one or two arrays of 30 members
// without rightholder info
console.log(rightHolders);
doc.rightHolders = rightHolders;
return (doc);
}));
}
database.music = function(q) {
return ($http.get("/api/music?where=" + JSON.stringify({
or: [{
title: {
contains: q
}
}, {
subtitle: {
contains: q
}
}]
})).then(function(res) {
// array of 30 promises :
// each one of 30 documents has to resolve its rightholders
var documents = [];
for (var i in res.data) {
documents.push(getRightHolders(res.data[i]));
}
return ($q.all(documents));
}));
}
return (database);
}]);
The first array of promises seems to work as expected, but not the second one in getRightHolders. What is strange is that this function returns an array of one or two promises, which are rightHolders waiting for their memberinfo. But in the callback where I console.log the response, I get an array of one or two elements (as per the number of pushed promises), yet these elements are arrays of 30 memberinfo objects instead of one memberinfo each, without the rightholder info. I don't understand how this $q.all() call gets mixed up with the previous-level $q.all.
The data structure is roughly like this:
documents [ ] ($http => 30 responses)
    music.musicid
    music.rightHolders [ ] ($http => 1, 2, 3 responses)
        rightholder.rights
        rightholder.member ($http => 1 response)
            member.memberinfo
Any help appreciated. Thank you !
UPDATE: Thank you for your answer, it worked like a charm. Here's the updated code, including the migrate service which formats data differently (there is some database migration going on). I kept it out of the first example, but your answer gave me this neat syntax.
angular.module("myApp").service("database", ["$q", "$http", "migrate", function($q, $http, migrate) {
var database = {};
function getHolderMember(rightHolder) {
return ($http.get("/api/members?where=" + JSON.stringify({
memberID: rightHolder.member
})).then(function(res) {
return (migrate.member(res.data[0]));
}).then(function(member) {
rightHolder.member = member;
return (rightHolder);
}));
}
function getRightHolders(doc) {
return ($http.get("/api/rightHolders?where=" + JSON.stringify({
musicID: doc.musicID
})).then(function(res) {
return (
$q.all(res.data
.map(migrate.rightHolder)
.map(getHolderMember)
)
);
}).then(function(rightHolders) {
doc.rightHolders = rightHolders;
return (doc);
}));
}
database.music = function(q) {
return ($http.get("/api/music?where=" + JSON.stringify({
or: [{
title: {
contains: q
}
},
{
subtitle: {
contains: q
}
}
]
})).then(function(res) {
return (
$q.all(res.data
.map(migrate.music)
.map(getRightHolders)
)
);
}));
}
return (database);
}
I'm not quite sure how you're getting the result you describe, but your logic is more convoluted than it needs to be, and I think this might be leading to the issues you're seeing. You're giving the getRightHolders function the responsibility of returning the document, and based on your comment above it sounds like you previously had the getHolderMember() function doing something similar and then stopped doing that.
We can clean this up by having each function be responsible only for the entities it handles, and by using .map() instead of for (please don't use for..in with arrays).
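As a quick illustration of why for..in is risky with arrays - it iterates string keys (including anything added to the array) rather than the elements themselves:
var rows = ['a', 'b'];
rows.extra = 'surprise';        // someone decorates the array

for (var i in rows) {
    console.log(i);             // logs "0", "1", "extra" - string keys, not elements
}

var upper = rows.map(function (row) {
    return row.toUpperCase();   // only the real elements, in order: ["A", "B"]
});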
Please give this a try:
angular
    .module("myapp")
    .service("database", ["$q", "$http", function($q, $http) {
        var database = {};

        function getHolderMember(memberId) {
            var query = JSON.stringify({ memberid: memberId });
            return $http.get("/api/members?where=" + query)
                .then(function (res) {
                    return res.data[0];
                });
        }

        function populateRightsHolderWithMember(rightsHolder) {
            return getHolderMember(rightsHolder.memberid)
                .then(function (member) {
                    rightsHolder.member = member;
                    return rightsHolder;
                });
        }

        function getRightsHolders(doc) {
            var query = JSON.stringify({ musicid: doc.musicid });
            return $http.get("/api/musicrights?where=" + query)
                .then(function(res) {
                    return $q.all(res.data.map(populateRightsHolderWithMember));
                });
        }

        function populateDocumentWithRightsHolders(document) {
            return getRightsHolders(document)
                .then(function(rightsHolders) {
                    document.rightsHolders = rightsHolders;
                    return document;
                });
        }

        database.music = function(q) {
            return $http.get("/api/music?where=" + JSON.stringify({
                or: [{
                    title: {
                        contains: q
                    }
                }, {
                    subtitle: {
                        contains: q
                    }
                }]
            })).then(function(res) {
                return $q.all(res.data.map(populateDocumentWithRightsHolders));
            });
        };

        return (database);
    }]);
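Hypothetical usage from a controller, just to show the shape of the result (names assumed):
database.music("love").then(function (documents) {
    // each document now carries document.rightsHolders, each with a populated .member
    $scope.documents = documents;
});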

Can't compare MongoDB data with javascript array

I want to compare data I got from MongoDB with a JavaScript array. I am using lodash to compare, but it always returns an incorrect result.
var editUser = function(userData, getOutFunction) {
    var status = CONSTANTS.NG;
    checkExistUser(userData._id).then(function(user) {
        if (user !== null) {
            var userGroup = JSON.stringify(user.group);
            user.group = user.group.map((groupId) => {
                return groupId.toString();
            });
            var removedGroups = _.difference(userGroup, userData.group);
            var addedGroups = _.difference(userData.group, userGroup);
            console.log('Removed Groups: ', removedGroups);
            console.log('Added Groups: ', addedGroups);
        } else {
            status = CONSTANTS.NG;
            logger.debug(DEBUG_CLASS_NAME, "Cannot find object");
            if (typeof(getOutFunction) !== 'undefined') {
                getOutFunction(status, null);
            } else {
                NO_CALLBACK();
            }
        }
    }).catch(function() {
        console.log('Promise is error');
    });
};

var checkExistUser = function(userId) {
    return new Promise(function(resolve, reject) {
        UserDAO.findById(userId, function(err, user) {
            if (err) {
                logger.debug(DEBUG_CLASS_NAME, {
                    name: err.name,
                    code: err.code,
                    message: err.message,
                    method: "checkExist"
                });
                resolve(null);
            } else {
                resolve(user);
            }
        });
    });
};
For example, when I pass these values to the lodash difference function:
user.group = ["58b8da67d585113517fed34e", "58b8da6ed585113517fed34f"];
userData.group = ['58b8da67d585113517fed34e'];
I want lodash difference return below result:
Removed Groups: ['58b8da6ed585113517fed34f']
Added Groups: []
However, the function gave me the result like:
Removed Groups: []
Added Groups: [ '58b8da67d585113517fed34e' ]
Can anyone help me in this case?
I will do appreciate it.
I have had this issue as well. The result from MongoDB is an ObjectId, so you can compare the someObjectId.toString() value with your array of strings, or you can use
someObjectId.equals(stringOrObjectIdValue)
However, if you want to keep using lodash functions, you will have to convert both arrays to strings (or both to ObjectIds) before passing them into the function.
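For example, normalizing the database side to strings before the comparison (a sketch, assuming user.group holds ObjectIds and userData.group holds plain strings):
var dbGroups = user.group.map(function (groupId) {
    return groupId.toString();                              // ObjectId -> string
});
var removedGroups = _.difference(dbGroups, userData.group); // in DB but not in the request
var addedGroups = _.difference(userData.group, dbGroups);   // in the request but not in DB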

Sequentially ngresource calls

I have a controller making two $resource calls against two REST services, where the result of the first one is used as input by the second one.
Here is the code:
if (requestLock == false) {
    $scope.T_01_04_sharedData.tempRequestForT_01_04 = insertNewRequest("aggr_1", $rootScope.globals.currentUser.username, "", "temp", "2016-07-30 00:00:00");
    requestLock = true;
}
if (action == 'add') {
    updateSelectedForRequest(prosumer, 'selected', $rootScope.globals.currentUser.username, $scope.T_01_04_sharedData.tempRequestForT_01_04);
} else {
    updateSelectedForRequest(prosumer, 'non-selected', $rootScope.globals.currentUser.username, $scope.T_01_04_sharedData.tempRequestForT_01_04);
}
Function updateSelectedForRequest
function updateSelectedForRequest(username, status, businessUser, request) {
    WidgetService.T_01_04_updateSelectedForRequest.query({
        businessUser_id: businessUser,
        request_id: request,
        username: username,
        status: status
    }, function (result) {
        // response handler
    });
}
Function insertNewRequest
function insertNewRequest(bu_id_target, requester, description, status, validUntil) {
    return WidgetService.T_01_04_insertNewRequest.query({
        bu_id_target: bu_id_target,
        requester: requester,
        description: description,
        status: status,
        validUntil: validUntil
    }, function (result) {
        $scope.T_01_04_sharedData.tempRequestForT_01_04 = result.request_id;
        return result;
    });
}
The problem is that the first call has not resolved by the time the second one runs, so the second one has no input.
Is there a way to run these two calls sequentially, so that the second call waits for the input from the first one?
Thanks a lot.
I'm not familiar with ngresource, but you can try something like this.
if (requestLock == false) {
    insertNewRequest("aggr_1", $rootScope.globals.currentUser.username, "", "temp", "2016-07-30 00:00:00")
        .then(function(result) {
            $scope.T_01_04_sharedData.tempRequestForT_01_04 = result;
            if (action == 'add') {
                updateSelectedForRequest(prosumer, 'selected', $rootScope.globals.currentUser.username, $scope.T_01_04_sharedData.tempRequestForT_01_04);
            } else {
                updateSelectedForRequest(prosumer, 'non-selected', $rootScope.globals.currentUser.username, $scope.T_01_04_sharedData.tempRequestForT_01_04);
            }
        }, function(error) { /* manage error here */ });
    requestLock = true;
}

function insertNewRequest(bu_id_target, requester, description, status, validUntil) {
    return new Promise(function(resolve, reject) {
        WidgetService.T_01_04_insertNewRequest.query({
            bu_id_target: bu_id_target,
            requester: requester,
            description: description,
            status: status,
            validUntil: validUntil
        }, function (result) {
            $scope.T_01_04_sharedData.tempRequestForT_01_04 = result.request_id;
            resolve(result);
        });
    });
}
more information on promise here : https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
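Since WidgetService appears to be built on $resource, another option is to return the $promise that $resource actions already expose instead of wrapping the call in a new Promise (a sketch, keeping the names and response shape from the question):
function insertNewRequest(bu_id_target, requester, description, status, validUntil) {
    return WidgetService.T_01_04_insertNewRequest.query({
        bu_id_target: bu_id_target,
        requester: requester,
        description: description,
        status: status,
        validUntil: validUntil
    }).$promise.then(function (result) {
        $scope.T_01_04_sharedData.tempRequestForT_01_04 = result.request_id;
        return result.request_id;
    });
}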

Nodejs async data duplication

I'm having some problems with an async process in nodejs.
I'm getting some data from a remote JSON and adding it to my array. This JSON has some duplicated values, and I need to check whether a value already exists in my array before adding it, to avoid data duplication.
My problem is that when I start the loop over the JSON values, the loop moves on to the next value before the previous one has finished processing, so my array is filled with duplicated data instead of keeping only one item per type.
Look my current code:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var i = 0,
        self = this;
    _.each(data, function(book) {
        i++;
        console.log('\n\n ------------------------------------------------------------ \n\n');
        console.log('BOOK: ' + book.volumeInfo.title);
        self.process_author(book, function() { console.log('in author'); });
        console.log('\n\n ------------------------------------------------------------');
        if(i == data.length) callback();
    })
}

BookRegistration.prototype.process_author = function(book, callback) {
    if(book.volumeInfo.authors) {
        var author = { name: book.volumeInfo.authors[0].toLowerCase() };
        if(!this.in_array(this.authors, author)) {
            this.authors.push(author);
            callback();
        }
    }
}

BookRegistration.prototype.in_array = function(list, obj) {
    for(i in list) { if(list[i] === obj) return true; }
    return false;
}
The result is:
[{name: author1 }, {name: author2}, {name: author1}]
And I need:
[{name: author1 }, {name: author2}]
UPDATED:
The solution suggested by @Zub works fine with arrays, but not with sequelize and a MySQL database.
When I try to save my authors list in the database, the data is duplicated, because the system starts saving another array element before it finishes saving the previous one.
What is the correct pattern in this case?
My code using database is:
BookRegistration.prototype.process_author = function(book, callback) {
    if(book.volumeInfo.authors) {
        var author = { name: book.volumeInfo.authors[0].toLowerCase() };
        var self = this;
        models.Author.count({ where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(count) {
            if(count < 1) {
                models.Author.create(author).success(function(author) {
                    console.log('SAVING AUTHOR');
                    self.process_publisher({ book:book, author:author }, callback);
                });
            } else {
                models.Author.find({where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(author) {
                    console.log('FOUND AUTHOR');
                    self.process_publisher({ book:book, author:author }, callback);
                });
            }
        });
        // if(!this.in_array(this.authors, 'name', author)) {
        //     this.authors.push(author);
        //     console.log('HERE IN AUTHOR');
        //     this.process_publisher(book, callback);
        // }
    }
}
How can I avoid data duplication in an async process?
This is because you are comparing different objects, and the result is always false.
Just for an experiment, type this in the console:
var obj1 = {a:1};
var obj2 = {a:1};
obj1 == obj2; //false
When comparing objects (as well as arrays), the result is only true when obj1 and obj2 reference the same object:
var obj1 = {a:1};
var obj2 = obj1;
obj1 == obj2; //true
Since you create new author objects in each process_author call, you always get false when comparing.
In your case the solution would be to compare the name property for each book:
BookRegistration.prototype.in_array = function(list, obj) {
    for(i in list) { if(list[i].name === obj.name) return true; }
    return false;
}
EDIT (related to your comment question):
I would rewrite process_new_books_list method as follows:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var i = 0,
        self = this;
    (function nextBook() {
        var book = data[i];
        if (!book) {
            callback();
            return;
        }
        self.process_author(book, function() {
            i++;
            nextBook();
        });
    })();
}
In this case the next process_author call does not happen immediately (like with _.each), but only after the callback has executed, so your program runs in sequence.
Not sure if this works though.
Sorry for my English, I'm not a native English speaker.

Synchronous database queries with Node.js

I have a Node.js/Express app that queries a MySQL db within the route and displays the result to the user. My problem is how to run the queries and block until both queries are done before redirecting the user to the page they requested.
In my example I have 2 queries that need to finish before I render the page. I can get the queries to run synchronously if I nest query 2 inside the 'result' callback of query 1. This, however, will become very convoluted when the number of queries increases.
How do I go about running multiple (in this case 2) database queries synchronously without nesting the subsequent query in the prior query's 'result' callback?
I've looked at the 'Flow control / Async goodies' in the Node modules and tried flow-js, but I can't get it to work with the async queries.
Listed below are the 2 queries that I'm attempting to execute from the '/home' route. Can the Node experts explain the 'right' way to do this?
app.get('/home', function (req,res) {
    var user_array = [];
    var title_array = [];
    // first query
    var sql = 'select user_name from users';
    db.execute(sql)
        .addListener('row', function(r) {
            user_array.push( { user_name: r.user_name } );
        })
        .addListener('result', function(r) {
            req.session.user_array = user_array;
        });
    // second query
    var sql = 'select title from code_samples';
    db.execute(sql)
        .addListener('row', function(r) {
            title_array.push( { title: r.title } );
        })
        .addListener('result', function(r) {
            req.session.title_array = title_array;
        });
    // because the queries are async no data is returned to the user
    res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
});
The goal with node is not to care what order things happen in. This can complicate some scenarios. There is no shame in nesting callbacks. Once you are used to how it looks, you may find that you actually prefer that style. I do; it is very clear what order callbacks will fire. You can forgo the anonymous functions to make it less verbose if you have to.
If you are willing to restructure your code a bit, you can use the "typical" nested callback method. If you want to avoid callbacks, there are numerous async frameworks that will try and help you do this. One that you might want to check out is async.js (https://github.com/fjakobs/async.js). Example of each:
app.get('/home', function (req,res) {
    var lock = 2;
    var result = {};
    result.user_array = [];
    result.title_array = [];
    var finishRequest = function(result) {
        req.session.title_array = result.title_array;
        req.session.user_array = result.user_array;
        res.render('home.ejs', {layout: false, locals: { user_name: result.user_array, title: result.title_array }});
    };
    // first query
    var q1 = function(fn) {
        var sql = 'select user_name from users';
        db.execute(sql)
            .addListener('row', function(r) {
                result.user_array.push( { user_name: r.user_name } );
            })
            .addListener('result', function(r) {
                return fn && fn(null, result);
            });
    };
    // second query
    var q2 = function(fn) {
        var sql = 'select title from code_samples';
        db.execute(sql)
            .addListener('row', function(r) {
                result.title_array.push( { title: r.title } );
            })
            .addListener('result', function(r) {
                return fn && fn(null, result);
            });
    };
    // Standard nested callbacks
    q1(function (err, result) {
        if (err) { return; /* do something */ }
        q2(function (err, result) {
            if (err) { return; /* do something */ }
            finishRequest(result);
        });
    });
    // Using async.js
    async.list([
        q1,
        q2,
    ]).call().end(function(err, result) {
        finishRequest(result);
    });
});
For a one-off, I would probably just use a reference counting type approach. Simply keep track of how many queries you want to execute and render the response when they have all finished.
app.get('/home', function (req,res) {
    var lock = 2;
    var user_array = [];
    var title_array = [];
    var finishRequest = function() {
        res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
    };
    // first query
    var sql = 'select user_name from users';
    db.execute(sql)
        .addListener('row', function(r) {
            user_array.push( { user_name: r.user_name } );
        })
        .addListener('result', function(r) {
            req.session.user_array = user_array;
            lock -= 1;
            if (lock === 0) {
                finishRequest();
            }
        });
    // second query
    var sql = 'select title from code_samples';
    db.execute(sql)
        .addListener('row', function(r) {
            title_array.push( { title: r.title } );
        })
        .addListener('result', function(r) {
            req.session.title_array = title_array;
            lock -= 1;
            if (lock === 0) {
                finishRequest();
            }
        });
});
An even nicer approach would be to simply call finishRequest() in each 'result' callback and check for non-empty arrays before you render the response. Whether that will work in your case depends on your requirements.
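A sketch of that variant (both 'result' callbacks call finishRequest, which only renders once both arrays have been filled; note this assumes each query always returns at least one row):
var finishRequest = function() {
    if (user_array.length > 0 && title_array.length > 0) {
        res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
    }
};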
Here's a really easy trick to handle multiple callbacks.
var after = function _after(count, f) {
    var c = 0, results = [];
    return function _callback() {
        switch (arguments.length) {
            case 0: results.push(null); break;
            case 1: results.push(arguments[0]); break;
            default: results.push(Array.prototype.slice.call(arguments)); break;
        }
        if (++c === count) {
            f.apply(this, results);
        }
    };
};
Example
Usage:
var handleDatabase = after(2, function (res1, res2) {
    res.render('home.ejs', { locals: { r1: res1, r2: res2 } });
});

db.execute(sql1).on('result', handleDatabase);
db.execute(sql2).on('result', handleDatabase);
So basically you need reference counting. This is the standard approach in these situations. I actually use this small utility function instead of flow control.
If you want a full blown flow control solution I would recommend futuresJS
I find that the async library is the best for things like this. https://github.com/caolan/async#parallel
I can't test this or anything, so forgive me if there are some typos. I refactored your query function to be reusable. So, calling queryRows will return a function that matches the format of the async module's parallel callback functions. After both queries are complete, it will call the last function and pass the result of the two queries as an argument, which you can read to pass to your template.
function queryRows(col, table) {
    return function(cb) {
        var rows = [];
        db.execute('SELECT ' + col + ' FROM ' + table)
            .on('row', function(r) {
                rows.push(r);
            })
            .on('result', function() {
                cb(null, rows);
            });
    };
}

app.get('/home', function(req, res) {
    async.parallel({
        users: queryRows('user_name', 'users'),
        titles: queryRows('title', 'code_samples')
    },
    function(err, result) {
        res.render('home.ejs', {
            layout: false,
            locals: {user_name: result.users, title: result.titles}
        });
    });
});
There are some solutions here, but in my opinion the best solution is to make the code synchronous in a very easy way.
You could use the "synchronize" package.
Just
npm install synchronize
Then var sync = require('synchronize');
Put the logic which should be synchronous into a fiber by using
sync.fiber(function() {
    // put your logic here
});
An example for two mysql queries:
var express = require('express');
var bodyParser = require('body-parser');
var mysql = require('mysql');
var sync = require('synchronize');

var db = mysql.createConnection({
    host     : 'localhost',
    user     : 'user',
    password : 'password',
    database : 'database'
});

db.connect(function(err) {
    if (err) {
        console.error('error connecting: ' + err.stack);
        return;
    }
});

function saveSomething() {
    var post = { /* fields for the main table row */ };
    // no callback here; the result is in "query"
    var query = sync.await(db.query('INSERT INTO mainTable SET ?', post, sync.defer()));
    var newId = query.insertId;
    post = { foreignKey: newId };
    // this query can be async, because it doesn't matter in this case
    db.query('INSERT INTO subTable SET ?', post, function(err, result) {
        if (err) throw err;
    });
}
When "saveSomething()" is called, it inserts a row in a main table and receives the last inserted id. After that the code below will be executed. No need for nesting promises or stuff like that.
Option one: if all your queries are related to each other, create a stored procedure, put all your data logic into it, and have a single db.execute call.
Option two: if your db uses one connection, then commands are guaranteed to be executed serially, and you can use this as an async helper:
db.execute(sql1).on('row', function(r) {
    req.session.user_array.push(r.user);
});
db.execute(sql2)
    .on('row', function(r) {
        req.session.title_array.push(r.title);
    })
    .on('end', function() {
        // render data from req.session
    });
You can use fibers to write pseudo-synchronous code with Node.js. Take a look at these tests for the DB layer: https://github.com/alexeypetrushin/mongo-lite/blob/master/test/collection.coffee
They are asynchronous but look synchronous; more details: http://alexeypetrushin.github.com/synchronize
