I am trying to create an array from database objects:
I have an entity "group" which hasMany "devices". I want to create an array with all groups and, for each group, the list of its devices:
[
  {
    "group_id": "1",
    "name": "My_group",
    "devices_list": [1, 2, 18]
  },
  {
    "group_id": "2",
    "name": "My_second_group",
    "devices_list": [3, 24]
  }
]
I tried several ways, like this:
Group.all(function (err, groups) {
  var resJson = {};
  groups.forEach(function(group, index){
    group.devices(function(err, devices){
      resJson[index] = group;
      console.log(devices);
      resJson[index].devices_list = devices;
      //End of the loop
      if (index == groups.length - 1){
        send({code: 200, data: resJson});
      }
    });
  });
});
EDIT 1:
I tried this way too:
var resJson = {};
groups.forEach(function(group, index){
  group.devices(function(err, devices){
    resJson[index] = group;
    resJson[index].devices_list = [];
    devices.forEach(function(device, index2){
      resJson[index].devices_list.push(device);
    });
    //End of the loop
    if (index == groups.length - 1){
      send({code: 200, data: resJson});
    }
  });
});
But finally, my resJson contains only the empty groups (groups with no associated devices); the other groups are not visible. Thus my devices_list arrays are all empty, whereas console.log(devices) displays devices.
It seems that the "send" instruction is processed before the non-empty groups have been handled.
What is the right way to do this?
Thank you for your time
Instead of tracking an index and comparing it against the length of the list, perhaps you could use an "after" type of construct. I really enjoy them: they're easy to integrate and serve perfectly for doing something after a set number of calls.
First, let's define an after function you can use.
var after = function(amount, fn) {
  var count = 0;
  return function() {
    count += 1;
    if (count >= amount) {
      fn.apply(null, arguments); // forward the arguments of the final call to fn
    }
  };
};
That should work for you. Now let's modify your code sample to use it.
var json = []; // To return; you originally wanted an array.

Group.all(function(err, groups) {
  if (err) {
    // Handle the error
  } else {
    var sendJson = after(groups.length, function(json) {
      send({code: 200, data: json});
    });
    groups.forEach(function(group) {
      group.devices(function(err, devices) {
        if (err) {
          // Handle the error...
        } else {
          group.devices_list = devices;
          json.push(group); // This part is different: this way you'll match the JSON you gave as your "goal"
        }
        // This is outside the if/else because it needs to be called for every group
        // regardless of outcome. If you do not call this the exact number of times
        // that you specified, it will never fire.
        sendJson(json);
      });
    });
  }
});
Perhaps something like that might clear up your issue.
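As an aside, if underscore or lodash is already a dependency, _.after provides this exact construct, so you can skip the hand-rolled helper (the sendJson name below is just the one from the sample above):

// _.after(n, fn) returns a wrapper that invokes fn once it has been
// called n times, forwarding the arguments of that call.
var sendJson = _.after(groups.length, function(json) {
  send({code: 200, data: json});
});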
Related
I am using Node v8.1.3
I have a JSON array like the following:
[
  {
    "id": 99,
    "name": "ABC"
  },
  {
    "id": 187,
    "name": "AXZ"
  }
]
This array has around 213,000 objects in it.
Also, the ids in the objects are not in any order or pattern.
Now, I want to find whether a particular id matches any ID in the array. What is the fastest way to do it?
I tried
isIdValid(id) {
  console.log(id)
  return this.list.filter((elem) => {
    return elem.id == id
  }).length > 0;
}
But this is taking over 4 seconds.
One option is to first sort the whole list (or insert it into a binary search tree), which takes some time but is only done once. From there you can use binary search for the ID, which is much faster.
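A minimal sketch of that sort-once/binary-search idea, assuming numeric ids and that the source array is called list (both assumptions, not part of the original question):

// One-time preparation: copy and sort by id, O(n log n).
var sortedList = list.slice().sort(function(a, b) { return a.id - b.id; });

// O(log n) membership test against the sorted copy.
function isIdValid(id) {
  var lo = 0, hi = sortedList.length - 1;
  while (lo <= hi) {
    var mid = (lo + hi) >> 1;
    if (sortedList[mid].id === id) return true;
    if (sortedList[mid].id < id) lo = mid + 1;
    else hi = mid - 1;
  }
  return false;
}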
Here is a sample BST package for Node:
js-bst
Also, here is a package that can be used to query a JSON data list much faster: Defiant
Edit
Actually, creating a hash table is a faster solution than a BST; here is a sample snippet that does the job:
var data = [
  {
    "id": 99,
    "name": "ABC"
  },
  {
    "id": 187,
    "name": "AXZ"
  }
];

var hashCache = {};
data.forEach(function(item){
  hashCache[item.id] = item.name;
});

// Usage:
var id = '99'; // object keys are strings, so a numeric id of 99 is found under '99'
var record = hashCache[id];
if (record) {
  console.log(record);
} else {
  console.log('no match found');
}
You should also consider that this hash table only works if the IDs are unique; otherwise, you need to store a list of names in the hash table for each ID.
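A sketch of that non-unique variant, grouping the names under each ID (reusing the data array from above):

var hashCache = {};
data.forEach(function(item){
  // Collect every name sharing the same id into an array.
  (hashCache[item.id] = hashCache[item.id] || []).push(item.name);
});
// hashCache[99] is now an array such as ['ABC']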
These are not necessarily the "most efficient way to check"; I'm posting them just as a reference for some ways to prepare the data so that searching by a specific key is more performant.
Also worth considering: in this script, Map can show better performance simply because it is the last of all the executions; by the time its turn comes, V8 will already have done some optimizations internally. So try to run each of them separately to get better findings.
'use strict';

////////////////////////
// GENERATE TEST DATA //
////////////////////////
const dataSet = [];
let count = 213000;
while (count--) dataSet.push({id: count});

let idToBeFound = 212999;

// //////////////////////////
// // Using Literal Object //
// //////////////////////////
console.time('creatingIndexAsLiteralObject');
// Build a single object keyed by id so lookups are O(1);
// an array of single-key objects would not index by id at all.
const literalObjectKeyedByID = {};
dataSet.forEach(item => { literalObjectKeyedByID[item.id] = true; });
console.timeEnd('creatingIndexAsLiteralObject');

console.time('isIdValidSeekingOnLiteralObject');
console.log('isIdValidSeekingOnLiteralObject :: Found?', isIdValidSeekingOnLiteralObject(idToBeFound, literalObjectKeyedByID));
console.timeEnd('isIdValidSeekingOnLiteralObject');

function isIdValidSeekingOnLiteralObject(id, list) {
  return !!list[id];
}
// //////////////////////
// // Using Set Object //
// //////////////////////
console.time('creatingIndexAsSetObject');
const setObject = new Set(dataSet.map(item => item.id));
console.timeEnd('creatingIndexAsSetObject');

console.time('isIdValidSeekingOnSet');
console.log('isIdValidSeekingOnSet :: Found?', isIdValidSeekingOnSet(idToBeFound, setObject));
console.timeEnd('isIdValidSeekingOnSet');

function isIdValidSeekingOnSet(id, list) {
  return list.has(id);
}
//////////////////////
// Using Map Object //
//////////////////////
console.time('creatingIndexAsMapObject');
const mapObjectKeyedByID = new Map();
// The value is left undefined; .has() only needs the key to exist.
dataSet.forEach(item => mapObjectKeyedByID.set(item.id));
console.timeEnd('creatingIndexAsMapObject');

console.time('isIdValidSeekingOnMap');
console.log('isIdValidSeekingOnMap :: Found?', isIdValidSeekingOnMap(idToBeFound, mapObjectKeyedByID));
console.timeEnd('isIdValidSeekingOnMap');

function isIdValidSeekingOnMap(id, list) {
  return list.has(id);
}
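For comparison, a short-circuiting linear scan with Array.prototype.some is a fairer baseline than the filter approach from the question, since it stops at the first match instead of always scanning all 213,000 objects:

console.time('isIdValidSeekingWithSome');
// Linear scan, but returns as soon as a match is found.
console.log('isIdValidSeekingWithSome :: Found?', dataSet.some(item => item.id === idToBeFound));
console.timeEnd('isIdValidSeekingWithSome');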
I have a collection of Users, and each user has an Ancestors array. The previous developer got the DB architecture wrong: each ancestor is a string but should be an ObjectId. Each string still contains an ObjectId value (in fact the hex of an ObjectId, like 558470744a73274db0f0d65d). How can I convert each ancestor to an ObjectId? I wrote this:
db.getCollection('Users').find({}).forEach(function(item){
  if (item.Ancestors instanceof Array){
    var tmp = new Array()
    item.Ancestors.forEach(function(ancestor){
      if (ancestor instanceof String){
        tmp.push(ObjectId(ancestor))
      }
    })
    item.Ancestors = tmp
    db.getCollection('Users').save(item)
  }
})
But it looks like it doesn't work properly: some of the ancestors are now ObjectIds and some are null. Ancestors can also be null from the start, so I put in all those ifs.
The solution concept here is to loop through your collection with a cursor and for each document within the cursor, gather data about the index position of the Ancestors array elements.
You will then use this data later on in the loop as the update operation parameters to correctly identify the elements to update.
Supposing your collection is not that humongous, the intuition above can be implemented using the forEach() method of the cursor, as you did in your attempt, to iterate and gather the index data for all the array elements involved.
The following demonstrates this approach for small datasets:
function isValidHexStr(id) {
  var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
  if (id == null) return false;
  if (typeof id == "string") {
    return id.length == 12 || (id.length == 24 && checkForHexRegExp.test(id));
  }
  return false;
}
db.users.find({"Ancestors.0": { "$exists": true, "$type": 2 }}).forEach(function(doc){
  var ancestors = doc.Ancestors,
      updateOperatorDocument = {};

  for (var idx = 0; idx < ancestors.length; idx++){
    if (isValidHexStr(ancestors[idx]))
      updateOperatorDocument["Ancestors." + idx] = ObjectId(ancestors[idx]);
  }

  db.users.updateOne(
    { "_id": doc._id },
    { "$set": updateOperatorDocument }
  );
});
Now, for improved performance, especially when dealing with large collections, take advantage of the Bulk() API to update the collection in bulk.
This is quite efficient, as opposed to the operations above, because with the bulk API you send the operations to the server in batches (for example, a batch size of 1000), which gives you much better performance since you won't be sending every request to the server, just one request per 1000 operations, making your updates more efficient and quicker.
The following examples demonstrate using the Bulk() API available in MongoDB versions >= 2.6 and < 3.2.
function isValidHexStr(id) {
  var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
  if (id == null) return false;
  if (typeof id == "string") {
    return id.length == 12 || (id.length == 24 && checkForHexRegExp.test(id));
  }
  return false;
}
var bulkUpdateOps = db.users.initializeUnorderedBulkOp(),
    counter = 0;

db.users.find({"Ancestors.0": { "$exists": true, "$type": 2 }}).forEach(function(doc){
  var ancestors = doc.Ancestors,
      updateOperatorDocument = {};

  for (var idx = 0; idx < ancestors.length; idx++){
    if (isValidHexStr(ancestors[idx]))
      updateOperatorDocument["Ancestors." + idx] = ObjectId(ancestors[idx]);
  }

  bulkUpdateOps.find({ "_id": doc._id }).update({ "$set": updateOperatorDocument });
  counter++; // increment counter for batch limit
  if (counter % 1000 == 0) {
    // execute the bulk update operation in batches of 1000
    bulkUpdateOps.execute();
    // Re-initialize the bulk update operations object
    bulkUpdateOps = db.users.initializeUnorderedBulkOp();
  }
})

// Clean up remaining operations in the queue
if (counter % 1000 != 0) { bulkUpdateOps.execute(); }
The next example applies to the new MongoDB version 3.2, which has since deprecated the Bulk() API and provided a newer set of APIs using bulkWrite().
It uses the same cursor as above but builds the array of bulk operations with the same forEach() cursor method, pushing each bulk write document to the array. Because write commands can accept no more than 1000 operations, you need to group your operations into batches of at most 1000 and re-initialise the array when the loop hits the 1000th iteration:
var cursor = db.users.find({"Ancestors.0": { "$exists": true, "$type": 2 }}),
    bulkUpdateOps = [];

cursor.forEach(function(doc){
  var ancestors = doc.Ancestors,
      updateOperatorDocument = {};

  for (var idx = 0; idx < ancestors.length; idx++){
    if (isValidHexStr(ancestors[idx]))
      updateOperatorDocument["Ancestors." + idx] = ObjectId(ancestors[idx]);
  }

  bulkUpdateOps.push({
    "updateOne": {
      "filter": { "_id": doc._id },
      "update": { "$set": updateOperatorDocument }
    }
  });

  if (bulkUpdateOps.length == 1000) {
    db.users.bulkWrite(bulkUpdateOps);
    bulkUpdateOps = [];
  }
});

if (bulkUpdateOps.length > 0) { db.users.bulkWrite(bulkUpdateOps); }
Try it like this using mongoose:
var mongoose = require('mongoose');

db.getCollection('Users').find({}).forEach(function(item){
  if (item.Ancestors instanceof Array){
    var tmp = new Array()
    item.Ancestors.forEach(function(ancestor){
      // String primitives are never `instanceof String`, so test with typeof
      if (typeof ancestor === 'string'){
        tmp.push(mongoose.Types.ObjectId(ancestor))
      }
    })
    item.Ancestors = tmp
    db.getCollection('Users').save(item)
  }
})
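As a side note, mongoose also exposes a validity check you could use in place of the typeof test; be aware that it accepts any 12-character string as well as 24-character hex strings, so it is a sanity check rather than a strict hex test:

if (mongoose.Types.ObjectId.isValid(ancestor)) {
  tmp.push(mongoose.Types.ObjectId(ancestor))
}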
In my users profile collection I have an array with image objects in it.
A user can have a maximum of 3 images in their profile collection. If the user already has 3, throw an error saying the maximum has been reached. The user has the option to remove an image themselves in the frontend.
I thought the solution would be to check the length of the array with $size: if it's less than 3, insert the image, else throw an error.
I'm using the tomi:upload-jquery package.
client:
Template.uploadImage.helpers({
  uploadUserData: function() {
    return Meteor.user();
  },
  finishUpload: function() {
    return {
      finished: function(index, fileInfo, context) {
        Meteor.call('insert.profileImage', fileInfo, function(error, userId) {
          if (error) {
            // todo: display modal with error
            return console.log(error.reason);
          } else {
            // console.log('success ' + userId);
            // console.log('success ' + fileInfo);
          }
        });
      }
    };
  }
});
The method (server) I use:
'insert.profileImage': function(postImage) {
  check(postImage, Object);
  // check array profile.images max 3
  Meteor.users.update(this.userId, {
    $push: {
      'profile.images': postImage
    }
  });
},
You may do it with a function using the $where operator:
'insert.profileImage': function(postImage) {
  var updateResults;
  check(postImage, Object);
  updateResults = Meteor.users.update(
    {
      _id: this.userId,
      $where: 'this.profile.images.length < 3' // 'this' is the tested doc
    },
    {
      $push: {
        'profile.images': postImage
      }
    });
  if (updateResults === 0) {
    throw new Meteor.Error('too-many-profile-images',
      'A user can only have up to 3 images on his/her profile');
  }
},
The Mongo docs warn about potential performance issues (if you run a JavaScript function on all documents of the store, you're in for bad surprises), but since we also search by _id I guess it should be fine.
This way, the update simply doesn't run if the user has too many images. You can also check the number of affected documents (the return value of the update) to know whether anything happened. If nothing happened (it returns 0), there aren't many possibilities: the user has too many images.
Use the $exists operator with dot notation to check for documents whose profile.images array already holds a third element (index position 2), i.e. whose array size has already reached the maximum of 3. For example, you could run that check with the find() method as follows:
var hasReachedMaxImages = Meteor.users.find(
  {
    '_id': this.userId,
    'profile.images.2': { '$exists': true }
  }).count() > 0;
So you could use that in your code as:
'insert.profileImage': function(postImage) {
  check(postImage, Object);
  // check array profile.images max 3
  var hasReachedMaxImages = Meteor.users.find(
    {
      '_id': this.userId,
      'profile.images.2': { '$exists': true }
    }).count() > 0;
  if (!hasReachedMaxImages){
    Meteor.users.update(this.userId, {
      $push: {
        'profile.images': postImage
      }
    });
  } else {
    throw new Meteor.Error('too-many-profile-images',
      'A user can only have up to 3 images on his/her profile');
  }
},
I'm having some problems with an async process in Node.js.
I'm getting some data from a remote JSON and adding it to my array. This JSON has some duplicated values, and I need to check whether each value already exists in my array before adding it, to avoid duplication.
My problem is that when I start the loop over the JSON values, the loop calls the next value before the previous one has finished being processed, so my array is filled with duplicated data instead of keeping only one item per type.
Look at my current code:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
  var i = 0,
      self = this;
  _.each(data, function(book) {
    i++;
    console.log('\n\n ------------------------------------------------------------ \n\n');
    console.log('BOOK: ' + book.volumeInfo.title);
    self.process_author(book, function() { console.log('in author'); });
    console.log('\n\n ------------------------------------------------------------');
    if (i == data.length) callback();
  })
}

BookRegistration.prototype.process_author = function(book, callback) {
  if (book.volumeInfo.authors) {
    var author = { name: book.volumeInfo.authors[0].toLowerCase() };
    if (!this.in_array(this.authors, author)) {
      this.authors.push(author);
      callback();
    }
  }
}

BookRegistration.prototype.in_array = function(list, obj) {
  for (i in list) { if (list[i] === obj) return true; }
  return false;
}
The result is:
[{name: author1 }, {name: author2}, {name: author1}]
And I need:
[{name: author1 }, {name: author2}]
UPDATED:
The solution suggested by @Zub works fine with arrays, but not with sequelize and a MySQL database.
When I try to save my authors list in the database, the data is duplicated, because the system starts saving another array element before it has finished saving the last one.
What is the correct pattern in this case?
My code using the database is:
BookRegistration.prototype.process_author = function(book, callback) {
  if (book.volumeInfo.authors) {
    var author = { name: book.volumeInfo.authors[0].toLowerCase() };
    var self = this;
    models.Author.count({ where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(count) {
      if (count < 1) {
        models.Author.create(author).success(function(author) {
          console.log('SALVANDO AUTHOR');
          self.process_publisher({ book: book, author: author }, callback);
        });
      } else {
        models.Author.find({ where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(author) {
          console.log('FIND AUTHOR');
          self.process_publisher({ book: book, author: author }, callback);
        });
      }
    });
    // if(!this.in_array(this.authors, 'name', author)) {
    //   this.authors.push(author);
    //   console.log('AQUI NO AUTHOR');
    //   this.process_publisher(book, callback);
    // }
  }
}
How can I avoid data duplication in an async process?
This is because you are comparing different objects, and the result is always false.
Just as an experiment, type this in the console:
var obj1 = {a:1};
var obj2 = {a:1};
obj1 == obj2; //false
When comparing objects (as well as arrays), the result is only true when obj1 and obj2 reference the same object:
var obj1 = {a:1};
var obj2 = obj1;
obj1 == obj2; //true
Since you create a new author object on each process_author call, the comparison always yields false.
In your case the solution is to compare the name property of each author:
BookRegistration.prototype.in_array = function(list, obj) {
  for (var i in list) { if (list[i].name === obj.name) return true; }
  return false;
}
EDIT (related to your comment question):
I would rewrite the process_new_books_list method as follows:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
  var i = 0,
      self = this;
  (function nextBook() {
    var book = data[i];
    if (!book) {
      callback();
      return;
    }
    self.process_author(book, function() {
      i++;
      nextBook();
    });
  })();
}
In this case the next process_author call happens not immediately (as with _.each) but only after the previous callback has executed, so your program proceeds in sequence. Note that process_author must invoke its callback on every code path (including when the author is already in the array); otherwise the chain stalls.
Not sure if this works, though.
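If pulling in a dependency is an option, the async package's eachSeries implements the same one-at-a-time pattern; this is just a sketch, assuming process_author always calls its callback:

var async = require('async');

BookRegistration.prototype.process_new_books_list = function(data, callback) {
  var self = this;
  // eachSeries starts the next iteration only after done() is called.
  async.eachSeries(data, function(book, done) {
    self.process_author(book, done);
  }, callback);
};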
Sorry for my English, I'm not a native English speaker
I've had a requirement recently to implement a UI for managing a many-to-many relationship. Ward Bell kindly provided this plunker showing how to implement it using 1-m-1 with Angular and Breeze.
My app's design (especially the datacontext and the local storage) is based largely on John Papa's recent Pluralsight courses.
In my app, BusUnit = Hero and Dimension = Power (in reference to Ward's example).
Everything seems to work well when I force the app to fetch data from the server, in that my updates to a business unit's dimensions are reflected correctly. The problem I'm facing now is when I navigate away from the page and back again (which gets data from local storage). In this case:
if I previously added a new dimension to a business unit, everything is OK, but
if I previously marked a business unit's dimension for deletion and then saved, the dimension still appears for the business unit in question.
This is the controller code that initially gets business units and their dimensions:
function getdboardStructure() {
  var busUnitsPromise = datacontextSvc.busUnits.getByDboardConfigId(vm.dboardConfig.id);
  var dimensionsPromise = datacontextSvc.dimensions.getByDboardConfigId(vm.dboardConfig.id);
  $q.all([busUnitsPromise, dimensionsPromise])
    .then(function (values) {
      vm.busUnits = values[0];
      vm.dims = values[1];
      createBusUnitVms();
      //vm.currentBusUnitVm = vm.busUnitVms[0]; // not required as using accordion instead of drop-down
      vm.hasChanges = false;
    });
}
This is the code in my controller that prepares for the save:
function applyBusUnitDimensionSelections(busUnitVm) {
  var busUnit = busUnitVm.busUnit;
  var mapVms = busUnitVm.dimensionMapVms;
  var dimensionHash = createBusUnitDimensionHash(busUnit);
  mapVms.forEach(function (mapVm) {
    var map = dimensionHash[mapVm.dimension.id];
    if (mapVm.selected) {
      if (!map) {
        datacontextSvc.busUnits.addBusUnitDimension(busUnit, mapVm.dimension)
          .then(function () {
          });
      }
    } else {
      if (map) {
        datacontextSvc.markDeleted(map);
      }
    }
  });
}
This is the code in my controller that executes the save:
function save() {
  if (!canSave()) {
    return $q.when(null);
  }
  vm.isSaving = true;
  vm.busUnitVms.forEach(applyBusUnitDimensionSelections);
  return datacontextSvc.save().then(function (saveResult) {
    vm.isSaving = false;
    trapSavedDboardConfigId(saveResult); // not relevant to use case
  }, function (error) {
    vm.isSaving = false;
  });
}
This is the code in my repository that adds a new busUnitDimension entity:
function addBusUnitDimension(busUnit, dimension) {
  var newBusUnitDimension = this.em.createEntity(busUnitDimension);
  newBusUnitDimension.busUnitId = busUnit.id;
  newBusUnitDimension.dimensionId = dimension.id;
  return this.$q.when(newBusUnitDimension);
}
This is my datacontext code for marking an item deleted:
function markDeleted(entity) {
  return entity.entityAspect.setDeleted();
}
And finally, this is the repository code to get business units and their join-table entities:
function getByDboardConfigId(dboardConfigId, forceRefresh) {
  var self = this;
  var predicate = pred.create('dboardConfigId', '==', dboardConfigId);
  var busUnits;
  if (self.zStorage.areItemsLoaded('busUnits') && !forceRefresh) {
    busUnits = self._getAllLocal(entityName, orderBy, predicate);
    return self.$q.when(busUnits);
  }
  return eq.from('BusUnits')
    .expand('BusUnitDimensions')
    .where(predicate)
    .orderBy(orderBy)
    .using(self.em).execute()
    .to$q(succeeded, self._failed);

  function succeeded(data) {
    busUnits = data.results;
    self.zStorage.areItemsLoaded('busUnits', true);
    self.zStorage.save();
    //self.logSuccess('Retrieved ' + busUnits.length + ' business units from server', busUnits.length, true);
    return busUnits;
  }
}
My departure from John's course examples is that I'm using expand in the function that gets business units from the server. My hypothesis is that this has something to do with the fact that Breeze goes to the server every time I refresh the page (without clearing the cache), and that it also has something to do with the error I'm receiving when I navigate away from and back to the page.
Can anyone offer any suggestions?
Appreciate this was a long time ago and you have probably solved it or moved on, but I came up against the same problem recently and it took me ages to resolve.
The answer I found is that you have to edit JP's angular.breeze.storagewip.js file.
It contains the names of the entities hard-coded into the file, and you will need to change these to match your own entities.
There are two functions where you need to do this; the examples below show the changes with the four entities I am using:
function zStorageCore($rootScope, zStorageConfig) {
  var storeConfig = zStorageConfig.config;
  var storeMeta = {
    breezeVersion: breeze.version,
    appVersion: storeConfig.version,
    isLoaded: {
      elementAssets: false,
      surveyors: false,
      elements: false,
      assets: false
    }
  };
and...
function checkStoreImportVersionAndParseData(importedData) {
  if (!importedData) {
    return importedData;
  }
  try {
    var data = JSON.parse(importedData);
    var importMeta = data[0];
    if (importMeta.breezeVersion === storeMeta.breezeVersion &&
        importMeta.appVersion === storeMeta.appVersion) {
      if (importMeta.isLoaded) {
        storeMeta.isLoaded.assets = storeMeta.isLoaded.assets || importMeta.isLoaded.assets;
        storeMeta.isLoaded.elements = storeMeta.isLoaded.elements || importMeta.isLoaded.elements;
        storeMeta.isLoaded.surveyors = storeMeta.isLoaded.surveyors || importMeta.isLoaded.surveyors;
        storeMeta.isLoaded.elementAssets = storeMeta.isLoaded.elementAssets || importMeta.isLoaded.elementAssets;
      }
      return data[1];
    } else {
      _broadcast(storeConfig.events.error,
        'Did not load from storage because mismatched versions',
        { current: storeMeta, storage: importMeta });
    }
  } catch (ex) {
    _broadcast(storeConfig.events.error, 'Exception during load from storage: ' + ex.message, ex);
  }
  return null; // failed
}
I solved this by comparing JP's Style Guide course files with his SPA/Angular/Breeze course.