backbone local storage adding twice - backbone.js

How do you work around Backbone local storage adding everything twice?
I am getting double values for everything in my local storage. The local storage also adds a key number, and the key id sequences are out of order:
drink: function() {
    var val = $('#new_booze').val();
    console.log(val);
    if (val === this.lastTitle) {
        return;
    }
    this.lastTitle = val;
    this.collection.create({
        boozeTitle: val,
        id: this.makeID(val)
    });
    this.collection.fetch({
        reset: true
    });
},

This is a bug in Backbone localStorage's create function:
create: function(model) {
    if (!model.id) model.set(model.idAttribute, guid());
    this.data[model.id] = model;
    this.save();
    return model;
},
setting the id & saving, both trigger a save on the model.
you can avoid this by using a fast non-cryptographic hash as the id since backbone will do a no-op if you add two models with the same id.
makeID: function(string) {
    var hash = 0, i, char; // declare loop variables; the original leaked them as globals
    if (string.length === 0) {
        return hash;
    }
    for (i = 0; i < string.length; i++) {
        char = string.charCodeAt(i);
        hash = ((hash << 5) - hash) + char;
        hash = hash & hash; // force to a 32-bit integer
    }
    return hash;
},
This has a few disadvantages: since it's a hashing function, each title can only be added once, and there's a greater chance of a hash collision than with a GUID.
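To illustrate the no-op behaviour this relies on, here is a minimal sketch (hypothetical data; assumes Backbone is loaded and the makeID hash above is available as a standalone function):

var boozes = new Backbone.Collection();
boozes.add({ id: makeID('gin'), boozeTitle: 'gin' });
boozes.add({ id: makeID('gin'), boozeTitle: 'gin' }); // same id, so this add is a no-op
console.log(boozes.length); // 1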

Related

Best way to get an element using its id from an array in Angular [duplicate]

I've got an array:
myArray = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'}, etc.]
I'm unable to change the structure of the array. I'm being passed an id of 45, and I want to get 'bar' for that object in the array.
How do I do this in JavaScript or using jQuery?
Use the find() method:
myArray.find(x => x.id === '45').foo;
From MDN:
The find() method returns the first value in the array, if an element in the array satisfies the provided testing function. Otherwise undefined is returned.
If you want to find its index instead, use findIndex():
myArray.findIndex(x => x.id === '45');
From MDN:
The findIndex() method returns the index of the first element in the array that satisfies the provided testing function. Otherwise -1 is returned.
If you want to get an array of matching elements, use the filter() method instead:
myArray.filter(x => x.id === '45');
This will return an array of objects. If you want to get an array of foo properties, you can do this with the map() method:
myArray.filter(x => x.id === '45').map(x => x.foo);
Side note: methods like find() or filter(), and arrow functions, are not supported by older browsers (like IE), so if you need to support those browsers you should transpile your code using Babel (with the polyfill).
As you are already using jQuery, you can use the grep function which is intended for searching an array:
var result = $.grep(myArray, function(e){ return e.id == id; });
The result is an array with the items found. If you know that the object is always there and that it only occurs once, you can just use result[0].foo to get the value. Otherwise you should check the length of the resulting array. Example:
if (result.length === 0) {
    // no result found
} else if (result.length === 1) {
    // property found, access the foo property using result[0].foo
} else {
    // multiple items found
}
Another solution is to create a lookup object:
var lookup = {};
for (var i = 0, len = array.length; i < len; i++) {
    lookup[array[i].id] = array[i];
}
... now you can use lookup[id]...
This is especially interesting if you need to do many lookups.
This won't need much more memory since the IDs and objects will be shared.
ECMAScript 2015 (JavaScript ES6) provides the find() method on arrays:
var myArray = [
    {id: 1, name: "bob"},
    {id: 2, name: "dan"},
    {id: 3, name: "barb"},
]

// grab the array item which matches the id "2"
var item = myArray.find(item => item.id === 2);

// print
console.log(item.name);
It works without external libraries. But if you want older browser support you might want to include this polyfill.
Underscore.js has a nice method for that:
myArray = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'},etc.]
obj = _.find(myArray, function(obj) { return obj.id == '45' })
I think the easiest way would be the following, but it won't work on Internet Explorer 8 (or earlier):
var result = myArray.filter(function(v) {
    return v.id === '45'; // Filter out the appropriate one
})[0].foo; // Get result and access the foo property
Try the following:

function findById(source, id) {
    for (var i = 0; i < source.length; i++) {
        if (source[i].id === id) {
            return source[i];
        }
    }
    throw new Error("Couldn't find object with id: " + id); // throw an Error object rather than a bare string
}
myArray.filter(function(a){ return a.id == some_id_you_want })[0]
A generic and more flexible version of the findById function above:
// array = [{key:value},{key:value}]
function objectFindByKey(array, key, value) {
    for (var i = 0; i < array.length; i++) {
        if (array[i][key] === value) {
            return array[i];
        }
    }
    return null;
}
var array = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'}];
var result_obj = objectFindByKey(array, 'id', '45');
Performance
On 2020.06.20 I ran tests on macOS High Sierra, on Chrome 81.0, Firefox 77.0 and Safari 13.1, for the chosen solutions.
Conclusions for solutions that use precalculation
Solutions with precalculation (K, L) are much faster than the other solutions and are not compared with them below - they probably benefit from special built-in browser optimisations
surprisingly, on Chrome and Safari the solution based on Map (K) is much faster than the solution based on a plain object {} (L)
surprisingly, on Safari for small arrays the solution based on a plain object {} (L) is slower than the traditional for loop (E)
surprisingly, on Firefox for small arrays the solution based on Map (K) is slower than the traditional for loop (E)
Conclusions when the searched objects ALWAYS exist
the solution using a traditional for loop (E) is fastest for small arrays and fast for big arrays
the solution using a cache (J) is fastest for big arrays - surprisingly, for small arrays it is only medium fast
the solutions based on find (A) and findIndex (B) are fast for small arrays and medium fast on big arrays
the solution based on $.map (H) is slowest on small arrays
the solution based on reduce (D) is slowest on big arrays
Conclusions when the searched objects NEVER exist
the solution based on a traditional for loop (E) is fastest on small and big arrays (except on Chrome for small arrays, where it is second fastest)
the solution based on reduce (D) is slowest on big arrays
the solution using a cache (J) is medium fast but could be sped up by also caching keys that resolve to null - this was not done here, to avoid unbounded memory consumption in the cache when many non-existent keys are searched, but a sketch of that variant follows
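A sketch of that variant (not part of the benchmark; it also caches misses, stored as null, trading memory for speed):

let cachedFindAll = (() => {
    let cache = new Map();
    return (arr, id) => {
        // cache hits AND misses, so repeated lookups of absent ids are O(1)
        if (!cache.has(id)) cache.set(id, arr.find(o => o.id == id) || null);
        return cache.get(id);
    };
})();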
Details
For solutions without precalculation: A, B, C, D, E, F, G, H, I, J (the J solution uses an 'inner' cache, and its speed depends on how often the searched elements repeat); with precalculation: K, L.
I performed four tests. In each test I want to find 5 objects in 10 loop iterations (the object IDs do not change between iterations) - so I call the tested method 50 times, but only the first 5 calls have unique id values:
small array (10 elements) where the searched object ALWAYS exists - you can run it HERE
big array (10k elements) where the searched object ALWAYS exists - you can run it HERE
small array (10 elements) where the searched object NEVER exists - you can run it HERE
big array (10k elements) where the searched object NEVER exists - you can run it HERE
The tested code is presented below:
function A(arr, id) {
    return arr.find(o => o.id == id);
}

function B(arr, id) {
    let idx = arr.findIndex(o => o.id == id);
    return arr[idx];
}

function C(arr, id) {
    return arr.filter(o => o.id == id)[0];
}

function D(arr, id) {
    return arr.reduce((a, b) => (a.id == id && a) || (b.id == id && b));
}

function E(arr, id) {
    for (var i = 0; i < arr.length; i++) if (arr[i].id == id) return arr[i];
    return null;
}

function F(arr, id) {
    var retObj = {};
    $.each(arr, (index, obj) => {
        if (obj.id == id) {
            retObj = obj;
            return false;
        }
    });
    return retObj;
}

function G(arr, id) {
    return $.grep(arr, e => e.id == id)[0];
}

function H(arr, id) {
    // fixed: the original referenced the global myArray instead of the arr parameter
    return $.map(arr, function(val) {
        return val.id == id ? val : null;
    })[0];
}

function I(arr, id) {
    return _.find(arr, o => o.id == id);
}

let J = (() => {
    let cache = new Map();
    return function J(arr, id, el = null) {
        return cache.get(id) || (el = arr.find(o => o.id == id), cache.set(id, el), el);
    }
})();

function K(arr, id) {
    return mapK.get(id);
}

function L(arr, id) {
    return mapL[id];
}

// -------------
// TEST
// -------------

console.log('Find id=5');
myArray = [...Array(10)].map((x, i) => ({'id': `${i}`, 'foo': `bar_${i}`}));
const mapK = new Map(myArray.map(el => [el.id, el]));
const mapL = {};
myArray.forEach(el => mapL[el.id] = el);

[A, B, C, D, E, F, G, H, I, J, K, L].forEach(f => console.log(`${f.name}: ${JSON.stringify(f(myArray, '5'))}`));
console.log('Whole array', JSON.stringify(myArray));

<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.5.1/jquery.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.min.js"></script>
This snippet only presents the tested code.
Example test results on Chrome for a small array where the searched objects always exist:
As others have pointed out, .find() is the way to go when looking for one object within your array. However, if your object cannot be found using this method, your program will crash:
const myArray = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'}];
const res = myArray.find(x => x.id === '100').foo; // Uh oh!
/*
Error:
"Uncaught TypeError: Cannot read property 'foo' of undefined"
or in newer chrome versions:
Uncaught TypeError: Cannot read properties of undefined (reading 'foo')
*/
This can be fixed by checking whether the result of .find() is defined before using .foo on it. Modern JS allows us to do this easily with optional chaining, returning undefined if the object cannot be found, rather than crashing your code:
const myArray = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'}];
const res = myArray.find(x => x.id === '100')?.foo; // No error!
console.log(res); // undefined when the object cannot be found
If you do this multiple times, you may set up a Map (ES6):
const map = new Map( myArray.map(el => [el.id, el]) );
Then you can simply do an O(1) lookup:
map.get('45').foo
You can get this easily using jQuery's $.map() function:
myArray = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'}];
var found = $.map(myArray, function(val) {
    return val.id == 45 ? val.foo : null;
});
// found[0] == "bar";
Working example: http://jsfiddle.net/hunter/Pxaua/
Using native Array.reduce
var array = [{'id':'73', 'foo':'bar'}, {'id':'45', 'foo':'bar'}];
var id = 73;
var found = array.reduce(function(a, b) {
    return (a.id == id && a) || (b.id == id && b);
});
This returns the matching element if found, otherwise false.
You can use filter:

function getById(id, myArray) {
    return myArray.filter(function(obj) {
        return obj.id == id; // returning the boolean directly is equivalent here
    })[0];
}

get_my_obj = getById(73, myArray);
While there are many correct answers here, many of them do not address the fact that this is an unnecessarily expensive operation if done more than once. In an extreme case this could be the cause of real performance problems.
In the real world, if you are processing a lot of items and performance is a concern it's much faster to initially build a lookup:
var items = [{'id':'73','foo':'bar'},{'id':'45','foo':'bar'}];
var lookup = items.reduce((o, i) => (o[i.id] = i, o), {}); // fixed: the original assigned the accumulator instead of the item
you can then get at items in fixed time like this:
var bar = lookup[id].foo;
You might also consider using a Map instead of an object as the lookup: https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/Map
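For instance, a minimal sketch of the same lookup built with a Map (illustrative data from the question):

const items = [{'id':'73','foo':'bar'}, {'id':'45','foo':'bar'}];
const lookup = new Map(items.map(item => [item.id, item]));
console.log(lookup.get('45').foo); // "bar"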
Recently I had to face the same thing, needing to search for a string in a huge array.
After some searching I found it can be handled with simple code:

Code:

var items = mydata.filter(function(item) {
    return item.word.toLowerCase().startsWith('gk'); // fixed the unterminated string literal
});
See https://jsfiddle.net/maheshwaghmare/cfx3p40v/4/
Iterate over all items in the array. For every item you visit, check that item's id. If it's a match, return it.
If you just want teh codez:
function getId(array, id) {
    for (var i = 0, len = array.length; i < len; i++) {
        if (array[i].id === id) {
            return array[i];
        }
    }
    return null; // Nothing found
}
And the same thing using ECMAScript 5's Array methods:
function getId(array, id) {
    var obj = array.filter(function (val) {
        return val.id === id;
    });
    // Filter returns an array, and we just want the matching item.
    return obj[0];
}
You may try out Sugarjs from http://sugarjs.com/.
It has a very sweet method on Arrays, .find. So you can find an element like this:
array.find( {id: 75} );
You may also pass an object with more properties to it to add another "where-clause".
Note that Sugarjs extends native objects, and some people consider this very evil...
As long as the browser supports ECMA-262, 5th edition (December 2009), this should work, almost one-liner:
var bFound = myArray.some(function (obj) {
    return obj.id === '45'; // the ids in the question's array are strings, so compare against a string
});
Note that some() only reports whether a match exists; use find() if you need the matching object itself.
Here's how I'd go about it in pure JavaScript, in the most minimal manner I can think of that works in ECMAScript 3 or later. It returns as soon as a match is found.
var getKeyValueById = function(array, key, id) {
    var testArray = array.slice(), test;
    while (test = testArray.pop()) {
        if (test.id === id) {
            return test[key];
        }
    }
    // return undefined if no matching id is found in array
    return;
}
var myArray = [{'id':'73', 'foo':'bar'}, {'id':'45', 'foo':'bar'}]
var result = getKeyValueById(myArray, 'foo', '45');
// result is 'bar', obtained from object with id of '45'
More generic and shorter:

function findFromArray(array, key, value) {
    return array.filter(function (element) {
        return element[key] == value;
    }).shift();
}

In your case, var element = findFromArray(myArray, 'id', 45) will give you the whole element.
We can use the jQuery methods $.each()/$.grep():

var data = [];
$.each(array, function(i, n) {
    if (n !== 5 && i > 4) {
        data.push(n);
    }
});

or

var data = $.grep(array, function(n, i) {
    return (n !== 5 && i > 4);
});
Use ES6 syntax:
Array.find, Array.filter, Array.forEach, Array.map
Or use Lodash https://lodash.com/docs/4.17.10#filter, Underscore https://underscorejs.org/#filter
Building on the accepted answer:
jQuery:
var foo = $.grep(myArray, function(e) { return e.id === foo_id; });
myArray.splice(myArray.indexOf(foo[0]), 1); // pop() takes no argument; splice out the found item to remove it
Or CoffeeScript:
foo = $.grep myArray, (e) -> e.id == foo_id
myArray.splice myArray.indexOf(foo[0]), 1
Use the Array.prototype.filter() function.
DEMO: https://jsfiddle.net/sumitridhal/r0cz0w5o/4/
JSON
var jsonObj = [
    {
        "name": "Me",
        "info": {
            "age": "15",
            "favColor": "Green",
            "pets": true
        }
    },
    {
        "name": "Alex",
        "info": {
            "age": "16",
            "favColor": "orange",
            "pets": false
        }
    },
    {
        "name": "Kyle",
        "info": {
            "age": "15",
            "favColor": "Blue",
            "pets": false
        }
    }
];
FILTER
var getPerson = function(name) {
    return jsonObj.filter(function(obj) {
        return obj.name === name;
    });
}
You can do this even in pure JavaScript by using the built-in "filter" function for arrays:

Array.prototype.filterObjects = function(key, value) {
    return this.filter(function(x) { return x[key] === value; });
}
So now simply pass "id" in place of key and "45" in place of value, and you will get the full object matching an id of 45. So that would be,
myArr.filterObjects("id", "45");
I really liked the answer provided by Aaron Digulla, but I needed to keep my array of objects so I could iterate through it later. So I modified it to:

var indexer = {};
for (var i = 0; i < array.length; i++) {
    indexer[array[i].id] = i; // the loop index is already a number; parseInt was unnecessary
}

// Then you can access object properties in your array using:
array[indexer[id]].property
Use:
var retObj = {};
$.each(ArrayOfObjects, function (index, obj) {
    if (obj.id === '5') { // id.toString() if it is an int
        retObj = obj;
        return false;
    }
});
return retObj;
It should return an object by id.
This solution may be helpful as well:

Array.prototype.grep = function (key, value) {
    var that = this, ret = [];
    this.forEach(function (elem, index) {
        if (elem[key] === value) {
            ret.push(that[index]);
        }
    });
    return ret.length < 2 ? ret[0] : ret;
};

var bar = myArray.grep("id", "45");

I made it work just like $.grep, except that if exactly one object is found, the function returns the object itself rather than an array.
Dynamic cached find
In this solution, when we search for an object, we save it in a cache. This is a middle ground between the "always search" solutions and precalculating a hash map for every object.
let cachedFind = (() => {
    let cache = new Map();
    return (arr, id, el = null) =>
        cache.get(id) || (el = arr.find(o => o.id == id), cache.set(id, el), el);
})();

// ---------
// TEST
// ---------

let myArray = [...Array(100000)].map((x, i) => ({'id': `${i}`, 'foo': `bar_${i}`}));

// example usage
console.log(cachedFind(myArray, '1234').foo);

// Benchmark
let bench = (id) => {
    console.time('time for ' + id);
    console.log(cachedFind(myArray, id).foo); // FIND
    console.timeEnd('time for ' + id);
}

console.log('----- no cached -----');
bench(50000);
bench(79980);
bench(99990);
console.log('----- cached -----');
bench(79980); // cached
bench(99990); // cached

MongoDB update nested array foreach

I have a collection of Users, and each user has an Ancestors array. The previous developer got the DB architecture wrong, and now each ancestor is a string but should be an ObjectId. The strings still contain ObjectId values (in fact the hex of an ObjectId, like 558470744a73274db0f0d65d). How can I convert each ancestor to an ObjectId? I wrote this:
db.getCollection('Users').find({}).forEach(function(item){
    if (item.Ancestors instanceof Array){
        var tmp = new Array()
        item.Ancestors.forEach(function(ancestor){
            if (ancestor instanceof String){
                tmp.push(ObjectId(ancestor))
            }
        })
        item.Ancestors = tmp
        db.getCollection('Users').save(item)
    }
})
But it doesn't seem to work properly: some ancestors are now ObjectIds and some are null. Ancestors can also be null from the start, which is why I put in all those ifs.
The solution concept here is to loop through your collection with a cursor and for each document within the cursor, gather data about the index position of the Ancestors array elements.
You will then use this data later on in the loop as the update operation parameters to correctly identify the elements to update.
Supposing your collection is not that humongous, the intuition above can be implemented using the cursor's forEach() method, as you did in your attempt, to do the iteration and gather the index data for all the array elements involved.
The following demonstrates this approach for small datasets:
function isValidHexStr(id) {
    var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
    if (id == null) return false;
    if (typeof id == "string") {
        return id.length == 12 || (id.length == 24 && checkForHexRegExp.test(id));
    }
    return false;
};

db.users.find({"Ancestors.0": { "$exists": true, "$type": 2 }}).forEach(function(doc){
    var ancestors = doc.Ancestors,
        updateOperatorDocument = {};

    for (var idx = 0; idx < ancestors.length; idx++){
        if (isValidHexStr(ancestors[idx]))
            updateOperatorDocument["Ancestors." + idx] = ObjectId(ancestors[idx]);
    };

    db.users.updateOne(
        { "_id": doc._id },
        { "$set": updateOperatorDocument }
    );
});
Now for improved performance, especially when dealing with large collections, take advantage of the Bulk() API for updating the collection in bulk.
This is quite efficient as opposed to the above operations, because with the bulk API you send the operations to the server in batches (for example, a batch size of 1000), which gives you much better performance since you won't be sending every request to the server individually, just once in every 1000 requests, thus making your updates more efficient and quicker.
The following examples demonstrate using the Bulk() API available in MongoDB versions >= 2.6 and < 3.2.
function isValidHexStr(id) {
    var checkForHexRegExp = new RegExp("^[0-9a-fA-F]{24}$");
    if (id == null) return false;
    if (typeof id == "string") {
        return id.length == 12 || (id.length == 24 && checkForHexRegExp.test(id));
    }
    return false;
};

var bulkUpdateOps = db.users.initializeUnorderedBulkOp(),
    counter = 0;

db.users.find({"Ancestors.0": { "$exists": true, "$type": 2 }}).forEach(function(doc){
    var ancestors = doc.Ancestors,
        updateOperatorDocument = {};

    for (var idx = 0; idx < ancestors.length; idx++){
        if (isValidHexStr(ancestors[idx]))
            updateOperatorDocument["Ancestors." + idx] = ObjectId(ancestors[idx]);
    };

    bulkUpdateOps.find({ "_id": doc._id }).update({ "$set": updateOperatorDocument });
    counter++; // increment counter for batch limit

    if (counter % 1000 == 0) {
        // execute the bulk update operation in batches of 1000
        bulkUpdateOps.execute();
        // Re-initialize the bulk update operations object
        bulkUpdateOps = db.users.initializeUnorderedBulkOp();
    }
})

// Clean up remaining operations in the queue
if (counter % 1000 != 0) { bulkUpdateOps.execute(); }
The next example applies to the new MongoDB version 3.2, which has since deprecated the Bulk() API and provided a newer set of APIs using bulkWrite().
It uses the same cursor as above but builds the array of bulk operations with the same forEach() cursor method, pushing each bulk write document to the array. Because write commands can accept no more than 1000 operations, you need to group your operations into batches of at most 1000 and re-initialise the array when the loop hits 1000 iterations:
var cursor = db.users.find({"Ancestors.0": { "$exists": true, "$type": 2 }}),
    bulkUpdateOps = [];

cursor.forEach(function(doc){
    var ancestors = doc.Ancestors,
        updateOperatorDocument = {};

    for (var idx = 0; idx < ancestors.length; idx++){
        if (isValidHexStr(ancestors[idx]))
            updateOperatorDocument["Ancestors." + idx] = ObjectId(ancestors[idx]);
    };

    bulkUpdateOps.push({
        "updateOne": {
            "filter": { "_id": doc._id },
            "update": { "$set": updateOperatorDocument }
        }
    });

    if (bulkUpdateOps.length == 1000) {
        db.users.bulkWrite(bulkUpdateOps);
        bulkUpdateOps = [];
    }
});

if (bulkUpdateOps.length > 0) { db.users.bulkWrite(bulkUpdateOps); }
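As a quick sanity check after any of these migrations (a sketch; BSON type 2 is the string type, and $type on an array field matches documents where any element has that type), the following count should come back as 0:

// should print 0 once no string ancestors remain
db.users.find({ "Ancestors": { "$type": 2 } }).count();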
Try it like this using mongoose:

var mongoose = require('mongoose');

db.getCollection('Users').find({}).forEach(function(item){
    if (item.Ancestors instanceof Array){
        var tmp = new Array()
        item.Ancestors.forEach(function(ancestor){
            if (typeof ancestor === 'string'){ // note: `ancestor instanceof String` never matches string primitives
                tmp.push(mongoose.Types.ObjectId(ancestor))
            }
        })
        item.Ancestors = tmp
        db.getCollection('Users').save(item)
    }
})

Prevent multiple submits in angularjs

I'm looking for an AngularJS-based way to prevent multiple submits per task.
I don't want buttons to be disabled after submission, or to close the form and wait for the task to complete. Instead, I need requests to be unique.
To be more specific, I need $http.get and $http.post to stop sending multiple identical requests.
Any ideas?
According to this article, you can use a provider decorator.
NOTE: this approach is based on angular-api:
https://gist.github.com/adambuczynski/354364e2a58786e2be71
UPDATE
I've changed a small part of the suggested solution, because the returned promises had lost .success, .error and .then.
Just use this edited code to have all of those functions working:
.config(["$provide", function ($provide) {
$provide.decorator('$http', function ($delegate, $q) {
var pendingRequests = {};
var $http = $delegate;
function hash(str) {
var h = 0;
var strlen = str.length;
if (strlen === 0) {
return h;
}
for (var i = 0, n; i < strlen; ++i) {
n = str.charCodeAt(i);
h = ((h << 5) - h) + n;
h = h & h;
}
return h >>> 0;
}
function getRequestIdentifier(config) {
var str = config.method + config.url;
if (config.data && typeof config.data === 'object') {
str += angular.toJson(config.data);
}
return hash(str);
}
var $duplicateRequestsFilter = function (config) {
if (config.ignoreDuplicateRequest) {
return $http(config);
}
var identifier = getRequestIdentifier(config);
if (pendingRequests[identifier]) {
if (config.rejectDuplicateRequest) {
return $q.reject({
data: '',
headers: {},
status: config.rejectDuplicateStatusCode || 400,
config: config
});
}
return pendingRequests[identifier];
}
pendingRequests[identifier] = $http(config);
$http(config).finally(function () {
delete pendingRequests[identifier];
});
return pendingRequests[identifier];
};
Object.keys($http).filter(function (key) {
return (typeof $http[key] === 'function');
}).forEach(function (key) {
$duplicateRequestsFilter[key] = $http[key];
});
return $duplicateRequestsFilter;
})
}])
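To illustrate how the decorated service behaves, here is a hypothetical usage sketch based on the config flags the decorator checks (ignoreDuplicateRequest and rejectDuplicateRequest):

// inside some controller or service that injects $http:
$http({ method: 'GET', url: '/api/items' });  // sent
$http({ method: 'GET', url: '/api/items' });  // duplicate: shares the pending promise
$http({ method: 'GET', url: '/api/items', rejectDuplicateRequest: true });  // rejected while the first is pending
$http({ method: 'GET', url: '/api/items', ignoreDuplicateRequest: true });  // always sent, bypasses the filter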
It could be a performance issue, but the following idea could solve your problem.
Store each request's URL and DATA as a key-value pair in a variable, with the URL as the key. Multiple submissions for the same URL can be stored in an array.
Then for any new call, check whether the URL is present in your stored object, and if it is, compare the data with each stored object thoroughly (a deep check, which is costly).
If an exact match is found, stop processing, since the same request came in before.
Otherwise proceed, and don't forget to store the new data as well.
It is costly, since you need to check the data, which could be heavy.
Note: if you convert the data to a JSON string at storage time, the comparison becomes a simple string comparison.
Here is the algorithm, cleaned up into valid JavaScript (the original sketch used PHP-style "for each" pseudocode):

var StoredObj = {};

YourService.call = function (url, params) {
    var str1 = JSON.stringify(params);
    if (StoredObj[url]) {
        // compare the new payload against every stored payload for this URL
        for (var i = 0; i < StoredObj[url].length; i++) {
            if (StoredObj[url][i] === str1) {
                return; // exact duplicate; drop the request
            }
        }
    } else {
        StoredObj[url] = []; // new array for this URL
    }
    StoredObj[url].push(str1);
    // call $http here
};

Nodejs async data duplication

I'm having some problems with an async process in Node.js.
I'm getting some data from a remote JSON and adding it to my array. This JSON has some duplicated values, and I need to check whether each one already exists in my array before adding it, to avoid data duplication.
My problem is that when I start the loop over the JSON values, the loop calls the next value before the previous one has finished being processed, so my array is filled with duplicated data instead of keeping only one item per type.
Look at my current code:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var i = 0,
        self = this;
    _.each(data, function(book) {
        i++;
        console.log('\n\n ------------------------------------------------------------ \n\n');
        console.log('BOOK: ' + book.volumeInfo.title);
        self.process_author(book, function() { console.log('in author'); });
        console.log('\n\n ------------------------------------------------------------');
        if (i == data.length) callback();
    })
}

BookRegistration.prototype.process_author = function(book, callback) {
    if (book.volumeInfo.authors) {
        var author = { name: book.volumeInfo.authors[0].toLowerCase() };
        if (!this.in_array(this.authors, author)) {
            this.authors.push(author);
            callback();
        }
    }
}

BookRegistration.prototype.in_array = function(list, obj) {
    for (i in list) { if (list[i] === obj) return true; }
    return false;
}
The result is:
[{name: author1 }, {name: author2}, {name: author1}]
And I need:
[{name: author1 }, {name: author2}]
UPDATED:
The solution suggested by @Zub works fine with arrays, but not with Sequelize and a MySQL database.
When I try to save my authors list to the database, the data is duplicated, because the system starts saving another array element before it finishes saving the previous one.
What is the correct pattern in this case?
My code using the database is:
BookRegistration.prototype.process_author = function(book, callback) {
    if (book.volumeInfo.authors) {
        var author = { name: book.volumeInfo.authors[0].toLowerCase() };
        var self = this;
        models.Author.count({ where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(count) {
            if (count < 1) {
                models.Author.create(author).success(function(author) {
                    console.log('SALVANDO AUTHOR');
                    self.process_publisher({ book: book, author: author }, callback);
                });
            } else {
                models.Author.find({ where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(author) {
                    console.log('FIND AUTHOR');
                    self.process_publisher({ book: book, author: author }, callback);
                });
            }
        });
        // if(!this.in_array(this.authors, 'name', author)) {
        //     this.authors.push(author);
        //     console.log('AQUI NO AUTHOR');
        //     this.process_publisher(book, callback);
        // }
    }
}
How can I avoid data duplication in an async process?
This is because you are comparing different objects, and the result is always false.
Just as an experiment, type this in the console:
var obj1 = {a:1};
var obj2 = {a:1};
obj1 == obj2; //false
When comparing objects (as well as arrays), the result is only true when obj1 and obj2 reference the same object:
var obj1 = {a:1};
var obj2 = obj1;
obj1 == obj2; //true
Since you create a new author object on each process_author call, the comparison is always false.
In your case the solution is to compare the name property instead:
BookRegistration.prototype.in_array = function(list, obj) {
    for (var i in list) { if (list[i].name === obj.name) return true; } // declare i; the original leaked a global
    return false;
}
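For what it's worth, the same check can be written with Array.prototype.some (a sketch with identical semantics, and no loop variable to leak):

BookRegistration.prototype.in_array = function(list, obj) {
    return list.some(function(item) { return item.name === obj.name; });
};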
EDIT (related to your comment question):
I would rewrite the process_new_books_list method as follows:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var i = 0,
        self = this;
    (function nextBook() {
        var book = data[i];
        if (!book) {
            callback();
            return;
        }
        self.process_author(book, function() {
            i++;
            nextBook();
        });
    })();
}
In this case the next process_author call happens not immediately (as with _.each) but only after the previous callback has executed, so your program proceeds in sequence. Note, though, that process_author as written only invokes its callback when the author is new, so the chain will stall on the first duplicate unless the callback is also invoked in that case.
I'm not sure this works, though. Sorry for my English; I'm not a native speaker.

Underscore.js groupBy multiple values

Using Underscore.js, I'm trying to group a list of items multiple times, i.e.
group by SIZE, then for each SIZE group by CATEGORY...
http://jsfiddle.net/rickysullivan/WTtXP/1/
Ideally, I'd like to have a function, or to extend _.groupBy(), so that you can throw an array at it with the parameters to group by:
var multiGroup = ['size', 'category'];
Probably could just make a mixin...
_.mixin({
    groupByMulti: function(obj, val, arr) {
        var result = {};
        var iterator = typeof val == 'function' ? val : function(obj) {
            return obj[val];
        };
        _.each(arr, function(arrvalue, arrIndex) {
            _.each(obj, function(value, objIndex) {
                var key = iterator(value, objIndex);
                var arrresults = obj[objIndex][arrvalue];
                if (_.has(value, arrvalue))
                    (result[arrIndex] || (result[arrIndex] = [])).push(value);
                // My head hurts, but I think some more pushing needs to go here...
            });
        })
        return result;
    }
});

properties = _.groupByMulti(properties, function(item) {
    var testVal = item["size"];
    if (parseFloat(testVal)) {
        testVal = parseFloat(item["size"])
    }
    return testVal
}, multiGroup);
A simple recursive implementation:
_.mixin({
    /*
     * @mixin
     *
     * Splits a collection into sets, grouped by the result of running each value
     * through iteratee. If iteratee is a string instead of a function, groups by
     * the property named by iteratee on each of the values.
     *
     * @param {array|object} list - The collection to iterate over.
     * @param {(string|function)[]} values - The iteratees to transform keys.
     * @param {object=} context - The values are bound to the context object.
     *
     * @returns {Object} - Returns the composed aggregate object.
     */
    groupByMulti: function(list, values, context) {
        if (!values.length) {
            return list;
        }
        var byFirst = _.groupBy(list, values[0], context),
            rest = values.slice(1);
        for (var prop in byFirst) {
            byFirst[prop] = _.groupByMulti(byFirst[prop], rest, context);
        }
        return byFirst;
    }
});
Demo in your jsfiddle
I think #Bergi's answer can be streamlined a bit by utilizing Lo-Dash's mapValues (for mapping functions over object values). It allows us to group the entries in an array by multiple keys in a nested fashion:
_ = require('lodash');

_.nest = function (collection, keys) { // fixed: `var _.nest = ...` is a syntax error
    if (!keys.length) {
        return collection;
    } else {
        return _(collection).groupBy(keys[0]).mapValues(function(values) {
            return _.nest(values, keys.slice(1)); // recurse through the mixin, not a bare `nest`
        }).value();
    }
};
I renamed the method to nest because it ends up serving much the same role served by D3's nest operator. See this gist for details and this fiddle for demonstrated usage with your example.
How about this rather simple hack?

console.log(_.groupBy(getProperties(), function(record) {
    return (record.size + record.category);
}));

Note that concatenating the keys without a delimiter can collide (e.g. size "1" + category "2a" produces the same key as size "12" + category "a"); the composite-key mixin below uses a delimiter for exactly that reason.
Check out this underscore extension: Underscore.Nest, by Irene Ros.
This extension's output will be slightly different from what you specify, but the module is only about 100 lines of code, so you should be able to scan to get direction.
This is a great use case for the reduce phase of map-reduce. It's not going to be as visually elegant as the multi-group function (you can't just pass in an array of keys to group on), but overall this pattern gives you more flexibility to transform your data. EXAMPLE
var grouped = _.reduce(
    properties,
    function(buckets, property) {
        // Find the correct bucket for the property
        var bucket = _.findWhere(buckets, {size: property.size, category: property.category});

        // Create a new bucket if needed.
        if (!bucket) {
            bucket = {
                size: property.size,
                category: property.category,
                items: []
            };
            buckets.push(bucket);
        }

        // Add the property to the correct bucket
        bucket.items.push(property);
        return buckets;
    },
    [] // The starting buckets
);

console.log(grouped)
But if you just want it in an underscore mixin, here's my stab at it:
_.mixin({
    'groupAndSort': function (items, sortList) {
        var grouped = _.reduce(
            items,
            function (buckets, item) {
                var searchCriteria = {};
                _.each(sortList, function (searchProperty) { searchCriteria[searchProperty] = item[searchProperty]; });
                var bucket = _.findWhere(buckets, searchCriteria);
                if (!bucket) {
                    bucket = {};
                    _.each(sortList, function (property) { bucket[property] = item[property]; });
                    bucket._items = [];
                    buckets.push(bucket);
                }
                bucket._items.push(item);
                return buckets;
            },
            [] // Initial buckets
        );
        grouped.sort(function (x, y) {
            for (var i in sortList) {
                var property = sortList[i];
                if (x[property] != y[property])
                    return x[property] > y[property] ? 1 : -1;
            }
            return 0;
        });
        return _.map(grouped, function (group) {
            var toReturn = { key: {}, value: group._items }; // fixed: was group.__items, which doesn't exist
            _.each(sortList, function (searchProperty) { toReturn.key[searchProperty] = group[searchProperty]; });
            return toReturn;
        });
    } // fixed: the original was missing this closing brace
});
The improvements by joyrexus on bergi's method don't take advantage of the underscore/lodash mixin system. Here it is as a mixin:
_.mixin({
    nest: function (collection, keys) {
        if (!keys.length) {
            return collection;
        } else {
            return _(collection).groupBy(keys[0]).mapValues(function(values) {
                return _.nest(values, keys.slice(1));
            }).value();
        }
    }
});
An example with lodash and a mixin:

_.mixin({
    'groupByMulti': function (collection, keys) {
        if (!keys.length) {
            return collection;
        } else {
            return _.mapValues(_.groupBy(collection, _.first(keys)), function(values) {
                return _.groupByMulti(values, _.rest(keys));
            });
        }
    }
});
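A hypothetical call, assuming properties is the array from the fiddle:

var grouped = _.groupByMulti(properties, ['size', 'category']);
// grouped[size][category] is then an array of the matching items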
Here is an easy to understand function.
function mixin(list, properties) {
    function grouper(i, list) {
        if (i < properties.length) {
            var group = _.groupBy(list, function(item) {
                var value = item[properties[i]];
                delete item[properties[i]];
                return value;
            });
            _.keys(group).forEach(function(key) {
                group[key] = grouper(i + 1, group[key]);
            });
            return group;
        } else {
            return list;
        }
    }
    return grouper(0, list);
}
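A hypothetical call against the same data (note that this function mutates its input: each grouped property is deleted from the item as it is grouped):

var grouped = mixin(properties, ['size', 'category']);
// grouped[size][category] holds the items, with 'size' and 'category' removed from each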
Grouping by a composite key tends to work better for me in most situations:
const groups = _.groupByComposite(myList, ['size', 'category']);
Demo using OP's fiddle
Mixin
_.mixin({
    /*
     * @groupByComposite
     *
     * Groups an array of objects by multiple properties. Uses _.groupBy under the covers,
     * to group by a composite key, generated from the list of provided keys.
     *
     * @param {Object[]} collection - the array of objects.
     * @param {string[]} keys - one or more property names to group by.
     * @param {string} [delimiter=-] - a delimiter used in the creation of the composite key.
     *
     * @returns {Object} - the composed aggregate object.
     */
    groupByComposite: (collection, keys, delimiter = '-') =>
        _.groupBy(collection, (item) => {
            const compositeKey = [];
            _.each(keys, key => compositeKey.push(item[key]));
            return compositeKey.join(delimiter);
        }),
});
