I want to populate an array of objects with data pulled from a getJSON function.
I'm currently doing it object by object, but know it'd be much cleaner with a for loop...
Here's the getJSON:
$.getJSON("http://www.reddit.com/r/pics.json?jsonp=?",
function(data) {
var imageurl1 = data.data.children[0].data.url;
var imagecaption1 = data.data.children[0].data.title;
var commentlink1 = "http://www.reddit.com" + data.data.children[0].data.permalink;
var imageurl2 = data.data.children[1].data.url;
var imagecaption2 = data.data.children[1].data.title;
var commentlink2 = "http://www.reddit.com" + data.data.children[1].data.permalink;
});
And here's the array I'm using the data for
var lightboximages = [
{
src: imageurl1,
caption: imagecaption1,
comments: commentlink1
}, {
src: imageurl2,
caption: imagecaption2,
comments: commentlink2
}]
Any help with the loop would be much appreciated!
Not tested, but this is the idea:
$.getJSON("http://www.reddit.com/r/pics.json?jsonp=?",
function(data) {
var lightboxArray = [];
for (var i=0; i<data.data.children.length; i++) {
var child = data.data.children[i];
var lightboxObj = {"src": child.data.url, "caption": child.data.title, "comments": "http://www.reddit.com" + child.data.permalink};
lightboxArray.push(lightboxObj);
}
// now call some function to process the array we've built
});
This is my poor code
function loaddata() {
var url = "http://localhost/Geocording/api.php";
$.getJSON(url, function (data) {
var json = data
for (var i = 0, length = json.length; i < length; i++) {
var val = json[i],
var latLng = new google.maps.LatLng(val.lat, val.lng);
console.log(latLng)
}
});
}
I'm trying to get details from my own API as a JSON array, but it's not working.
{"location":[{"name":"Home 1","lat":"6.824367","lng":"80.034523","type":"1"},{"name":"Grid Tower 1","lat":"6.82371292","lng":"80.03451942","type":"1"},{"name":"Power Station A","lat":"6.82291793","lng":"80.03417451","type":"1"}],"success":1}
This is the JSON response from my api.php:
Make things clear first, then apply them: read the JSON response carefully before using it in your code (the array you want is nested under the location key). This is the working code:
function loaddata() {
var url = "http://localhost/Geocording/api.php";
$.getJSON(url, function (data) {
var json = data['location'];
for (var i = 0, length = json.length; i < length; i++) {
var val = json[i];
var latLng = new google.maps.LatLng(val['lat'], val['lng']);
console.log(latLng)
}
});
}
Hope this may help you!
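If the end goal is to actually plot these points, each latLng can be turned into a marker. A rough sketch, assuming a google.maps.Map instance called map has already been created elsewhere on the page:
function loaddata(map) {
    var url = "http://localhost/Geocording/api.php";
    $.getJSON(url, function (data) {
        var json = data['location'];
        for (var i = 0; i < json.length; i++) {
            var val = json[i];
            // lat/lng arrive as strings in the sample response, so parse them to numbers
            new google.maps.Marker({
                position: new google.maps.LatLng(parseFloat(val['lat']), parseFloat(val['lng'])),
                title: val['name'],
                map: map
            });
        }
    });
}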
I have two arrays as follows:
var field_array=["booktitle","bookid","bookauthor"];
var data_array=["testtitle","testid","testauthor"];
I want to combine these two arrays and convert them to the following format:
var data={
"booktitle":"testtitle",
"bookid":"testid",
"bookauthor":"testauthor"
}
I want to insert this data into the database using Node.js:
var lastquery= connection.query('INSERT INTO book_tbl SET ?',data, function (error, results, fields) {
if (error) {
res.redirect('/list');
}else{
res.redirect('/list');
}
});
Please help me to solve this.
var field_array = ["booktitle", "bookid", "bookauthor"];
var data_array = ["testtitle", "testid", "testauthor"];
var finalObj = {};
field_array.forEach(function (eachItem, i) {
finalObj[eachItem] = data_array[i];
});
console.log(finalObj); // finalObj contains your data
You can also use reduce() in a similar way:
var field_array=["booktitle","bookid","bookauthor"];
var data_array=["testtitle","testid","testauthor"];
var result = field_array.reduce((acc, item, i) => {
acc[item] = data_array[i];
return acc;
}, {});
console.log(result);
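For completeness, in environments with ES2019 support the same object can be built with Object.fromEntries (just an alternative, not required here):
var data = Object.fromEntries(field_array.map(function (key, i) {
    return [key, data_array[i]];
}));
console.log(data); // { booktitle: "testtitle", bookid: "testid", bookauthor: "testauthor" }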
Here I explained my code line by line. Hope it will help.
var field_array = ["booktitle", "bookid", "bookauthor"];
var data_array = ["testtitle", "testid", "testauthor"];
//Convert the above two arrays into a JSON object
var jsondata = {};
field_array.forEach(function (eachItem, i) {
jsondata[eachItem] = data_array[i];
});
//End
//Store jsondata into an array according to the database column structure
//(jsondata is a single object here, so values holds one row; push more rows for a true bulk insert)
var values = [];
values.push([jsondata.booktitle, jsondata.bookid, jsondata.bookauthor]);
//END
//Bulk insert using nested array: [ [a,b],[c,d] ] will be flattened to (a,b),(c,d)
connection.query('INSERT INTO book_tbl (booktitle, bookid, bookauthor) VALUES ?', [values], function(err, result) {
if (err) {
res.send('Error');
}
else {
res.send('Success');
}
});
//END
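Alternatively, since the query in the question already uses SET ?, the combined object can be passed to it directly. A small sketch along those lines, reusing the connection and res objects from the question:
var jsondata = {};
field_array.forEach(function (eachItem, i) {
    jsondata[eachItem] = data_array[i];
});
// 'SET ?' expands the object into column = value pairs for a single-row insert
connection.query('INSERT INTO book_tbl SET ?', jsondata, function (error, results, fields) {
    if (error) {
        res.redirect('/list');
    } else {
        res.redirect('/list');
    }
});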
I have 2 API calls.
The second API call depends on the property IDs returned by the first, and checks whether each of those properties has parking.
If it does, then I add details of that property to an object and push the object into an Array.
The second API call is nested inside the first. After I've looped through all the properties, I check whether the array length is more than 0; if it is, I display the returned properties on the page, otherwise an error is shown.
The problem is that even when properties with parking are returned, the else branch (the error function) executes as well as the properties being displayed on the page.
Is there a way to complete the nested Promise before checking if my Array is more than 0?
Here's my code:
$scope.viewPropertyList = function(latlong) {
$scope.locationError = false;
var latlongArray = latlong.split('::');
var searchLat_scope = latlongArray[0];
var searchLon_scope = latlongArray[1];
if (searchLat_scope && searchLon_scope) {
var data = Property.getAllProperties({
dest: 'property',
apikey: API_KEY,
lat: encodeURIComponent(searchLat_scope),
limit: 10,
lon: encodeURIComponent(searchLon_scope)
}).$promise.then(function(success) {
var propertyMarkers = [];
$scope.dbMarkers = 0;
for (var i = 0, l = success.property.length; i < l; i++) {
(function(i) {
Property.getProperty({
dest: 'property',
propertyId: success.property[i].name,
apikey: API_KEY
}).$promise.then(function(propertyData) {
for (var j = 0, k = propertyData.services.length; j < k; j++) {
if (propertyData.services[j].name === "parking") {
var obj = {
"propertyName": success.property[i].propertyName,
"telephone": success.property[i].telephone,
"postcode": success.property[i].address.postcode,
"city": success.property[i].address.city,
"county": success.property[i].address.county,
"addressLine1": success.property[i].address.addressLine1
};
propertyMarkers.push(obj);
}
}
if (propertyMarkers.length != 0) {
$scope.dbMarkers = propertyMarkers;
$scope.selectedLat = searchLat_scope;
$scope.selectedlog = searchLon_scope;
} else {
$scope.locationErr = true;
$scope.errorMsg = "No properties found";
}
});
})(i);
}
}, function(error) {
$scope.locationErr = true;
$scope.errorMsg = "Something went wrong, please try again";
});
}
}
Two main things:
there's no attempt to aggregate multiple promises generated in a loop.
the if (propertyMarkers.length > 0) {...} else {...} is too deeply nested.
Minor:
the inner iteration can break as soon as 'parking' is found. If it continued and further 'parking' was found, then duplicate markers would be created.
$scope.viewPropertyList = function(latlong) {
$scope.locationError = false;
var latlongArray = latlong.split('::');
var searchLat_scope = latlongArray[0];
var searchLon_scope = latlongArray[1];
if (searchLat_scope && searchLon_scope) {
Property.getAllProperties({
dest: 'property',
apikey: API_KEY,
limit: 10,
lat: encodeURIComponent(searchLat_scope),
lon: encodeURIComponent(searchLon_scope)
}).$promise.then(function(success) {
var propertyMarkers = [];
$scope.dbMarkers = 0;
// create an array of promises by mapping the array `success.property`.
var promises = success.property.map(function(prop) {
return Property.getProperty({
dest: 'property',
propertyId: prop.name,
apikey: API_KEY
}).$promise.then(function(propertyData) {
for (var j=0, k=propertyData.services.length; j<k; j++) {
if (propertyData.services[j].name === 'parking') {
propertyMarkers.push({
'propertyName': prop.propertyName,
'telephone': prop.telephone,
'postcode': prop.address.postcode,
'city': prop.address.city,
'county': prop.address.county,
'addressLine1': prop.address.addressLine1
});
break; // 'parking' is found - no point iterating further
}
}
});
});
/* ******** */
// Aggregate `promises`
$q.all(promises).then(function() {
// This block is now un-nested from its original position,
// and will execute when all `promises` have resolved.
if (propertyMarkers.length > 0) {
$scope.dbMarkers = propertyMarkers;
$scope.selectedLat = searchLat_scope;
$scope.selectedlog = searchLon_scope;
} else {
$scope.locationErr = true;
$scope.errorMsg = 'No parking found';
}
});
/* ******** */
}).catch(function(error) {
$scope.locationErr = true;
$scope.errorMsg = 'Something went wrong, please try again';
});
} else {
$scope.locationErr = true;
$scope.errorMsg = 'Problem with lat/lng data';
}
}
Notes:
The outer iteration is now coded as success.property.map(), which returns an array of promises and avoids the need for an IIFE.
Extra error handling has been added.
If I understand your problem correctly, you want all of the Property.getProperty promises for success.property to be resolved before you check the propertyMarkers length.
In that case, you need $q.all to resolve all of the Property.getProperty promises for you.
Inside your for (var i = 0, l = success.property.length; i < l; i++) { loop, append each of these promises to an array:
Property.getProperty({
dest: 'property',
propertyId: success.property[i].name,
apikey: API_KEY
})
Then use $q.all(arrPromises).then(function(results) { ... }) to perform the length check.
One more thing worth mentioning: promise chaining can easily be achieved with $promise.then(successFn, failFn).then(successFn, failFn).then(...). Every call to then() creates another promise, which lets you chain on to the next step and pass a value along.
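A minimal sketch of that shape (illustrative only; it assumes the same Property service, API_KEY and an injected $q as in the question):
var arrPromises = [];
for (var i = 0, l = success.property.length; i < l; i++) {
    arrPromises.push(Property.getProperty({
        dest: 'property',
        propertyId: success.property[i].name,
        apikey: API_KEY
    }).$promise);
}
// $q.all resolves once every promise in the array has resolved
$q.all(arrPromises).then(function (results) {
    // results[i] corresponds to success.property[i]; build propertyMarkers here,
    // then check propertyMarkers.length and set the scope variables accordingly
});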
I have an Angular SPA running on a SharePoint 2013 page. In the code, I'm using $q to pull data from 10 different SharePoint lists using REST and then merging them into one JSON object for use in a grid. The code runs and outputs the intended merged data but it's leaky and crashes the browser after a while.
Here's the code in the service:
factory.getGridInfo = function() {
var deferred = $q.defer();
var list_1a = CRUDFactory.getListItems("ListA", "column1,column2,column3");
var list_1b = CRUDFactory.getListItems("ListB", "column1,column2,column3");
var list_2a = CRUDFactory.getListItems("ListC", "column4");
var list_2b = CRUDFactory.getListItems("ListD", "column4");
var list_3a = CRUDFactory.getListItems("ListE", "column5");
var list_3b = CRUDFactory.getListItems("ListF", "column5");
var list_4a = CRUDFactory.getListItems("ListG", "column6");
var list_4b = CRUDFactory.getListItems("ListH", "column6");
var list_5a = CRUDFactory.getListItems("ListI", "column7");
var list_5b = CRUDFactory.getListItems("ListJ", "column7");
$q.all([list_1a, list_1b, list_2a, list_2b, list_3a, list_3b, list_4a, list_4b, list_5a, list_5b])
.then(function(results){
var results_1a = results[0].data.d.results;
var results_1b = results[1].data.d.results;
var results_2a = results[2].data.d.results;
var results_2b = results[3].data.d.results;
var results_3a = results[4].data.d.results;
var results_3b = results[5].data.d.results;
var results_4a = results[6].data.d.results;
var results_4b = results[7].data.d.results;
var results_5a = results[8].data.d.results;
var results_5b = results[9].data.d.results;
var combined_1 = results_1a.concat(results_1b);
var combined_2 = results_2a.concat(results_2b);
var combined_3 = results_3a.concat(results_3b);
var combined_4 = results_4a.concat(results_4b);
var combined_5 = results_5a.concat(results_5b);
for(var i = 0; i < combined_1.length; i++){
var currObj = combined_1[i];
currObj["column4"] = combined_2[i].column4;
currObj["column5"] = combined_3[i].column5;
currObj["column6"] = combined_4[i].column6;
currObj["column7"] = combined_5[i].column7;
factory.newObjectArray[i] = currObj;
}
deferred.resolve(factory.newObjectArray);
},
function (error) {
deferred.reject(error);
});
return deferred.promise;
};
Here's the REST call in CRUDFactory:
factory.getListItems = function (listName, columns){
var webUrl = _spPageContextInfo.webAbsoluteUrl + "/_api/web/lists/getByTitle('"+listName+"')/items?$select="+columns+"&$top=5000";
var options = {
headers: { "Accept": "application/json; odata=verbose" },
method: 'GET',
url: webUrl
};
return $http(options);
};
And then here's the controller bit:
$scope.refreshGridData = function(){
$scope.hideLoadingGif = false;
$scope.GridData = "";
GlobalFactory.getGridInfo()
.then(function(){
$scope.GridData = GlobalFactory.newObjectArray;
$scope.hideLoadingGif = true;
});
};
UPDATE 1: Per request, here's the HTML (just a simple div that we're using angular-ui-grid on):
<div ui-grid="GridOptions" class="grid" ui-grid-selection ui-grid-exporter ui-grid-save-state></div>
This code starts by declaring some get calls and then uses $q.all to iterate over the calls and get the data. It then stores the results and merges them down to 5 total arrays. Then, because my list structure is proper and static, I'm able to iterate over one of the merged arrays and pull data from the other arrays into one master array that I'm assigning to factory.newObjectArray, which I'm declaring as a global in my service and using as my grid data source.
The code runs and doesn't throw any errors, but the issue is (I believe) with the getGridInfo function. If I don't comment out any of the REST calls, the browser uses 45 MB of memory that doesn't get picked up by GC, which then compounds with each click until the session ends or the browser crashes. If I comment out all the calls but one, my page only uses 18.4 MB of memory, which is high but I can live with it.
So what's the deal? Do I need to destroy something somewhere? If so, what and how? Or does this relate back to the REST function I'm using?
UPDATE 2: The return result that the grid is using (the factory.newObjectArray) contains a total of 5,450 items and each item has about 80 properties after the merge. The code above is simplified and shows the pulling of a couple columns per list, but in actuality, I'm pulling 5-10 columns per list.
At the end of the day you are dealing with a lot of data, so memory problems are potentially always going to be an issue and you should probably consider whether you need to have all the data in memory.
The main goal you should probably be trying to achieve is limiting duplication of arrays, and trying to keep the memory footprint as low as possible, and freeing memory as fast as possible when you're done processing.
Please consider the following. You mention that the actual number of columns being returned is larger than in your example, so I have taken that into account.
factory.getGridInfo = function () {
var deferred = $q.defer(),
// list definitions
lists = [
{ name: 'ListA', columns: ['column1', 'column2', 'column3'] },
{ name: 'ListB', columns: ['column1', 'column2', 'column3'], combineWith: 'ListA' },
{ name: 'ListC', columns: ['column4'] },
{ name: 'ListD', columns: ['column4'], combineWith: 'ListC' },
{ name: 'ListE', columns: ['column5'] },
{ name: 'ListF', columns: ['column5'], combineWith: 'ListE' },
{ name: 'ListG', columns: ['column6'] },
{ name: 'ListH', columns: ['column6'], combineWith: 'ListG' },
{ name: 'ListI', columns: ['column7'] },
{ name: 'ListJ', columns: ['column7'], combineWith: 'ListI' },
],
// Combines two arrays in place (no new array is allocated), appending in chunks
// of 5000 to stay under the engine's argument-count limits for apply()
combineArrays = function (a, b) {
var len = b.length;
for (var i = 0; i < len; i = i + 5000) {
a.push.apply(a, b.slice(i, i + 5000));
}
};
$q.all(lists.map(function (list) { return CRUDFactory.getListItems(list.name, list.columns.join()); }))
.then(function (results) {
var listResultMap = {}, baseList = 'ListA';
// map our results to our list names
for(var i = 0; i < results.length; i++) {
listResultMap[lists[i].name] = results[i].data.d.results;
}
// loop around our lists
for(var i = 0; i < lists.length; i++) {
var listName = lists[i].name, combineWith = lists[i].combineWith;
if(combineWith) {
combineArrays(listResultMap[combineWith], listResultMap[listName]);
delete listResultMap[listName];
}
}
// build result - merge the corresponding row of every remaining list into the base list's row
factory.newObjectArray = listResultMap[baseList].map(function(item, index) {
for(var i = 0; i < lists.length; i++) {
var listName = lists[i].name;
// skip the base list itself and any list already combined (and deleted) above
if(listName !== baseList && listResultMap[listName]) {
for(var c = 0; c < lists[i].columns.length; c++) {
var columnName = lists[i].columns[c];
item[columnName] = listResultMap[listName][index][columnName];
}
}
}
return item;
});
// clean up our remaining results
for (var i = 0; i < results.length; i++) {
delete results[i].data.d.results;
delete results[i];
}
deferred.resolve(factory.newObjectArray);
},
function (error) {
deferred.reject(error);
});
return deferred.promise;
};
I would suggest adding some sort of paging option... it's probably not a great idea to put all the results into one big list.
Next, I would suggest avoiding ng-repeat, or at least adding a "track by" to the repeat expression.
Check out: http://www.alexkras.com/11-tips-to-improve-angularjs-performance/
Profile your queries with Fiddler; the issue is probably rendering all the elements in the DOM, which could be quite slow (worth investigating).
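For the paging idea, one low-effort option is to bind only a slice of the merged array to the grid at a time. A rough client-side sketch (the page size and scope property names are illustrative, not from the original code):
var PAGE_SIZE = 250; // illustrative page size
$scope.currentPage = 0;
$scope.showPage = function (page) {
    $scope.currentPage = page;
    var start = page * PAGE_SIZE;
    // bind only the visible slice to the grid rather than all ~5,450 rows
    $scope.GridData = GlobalFactory.newObjectArray.slice(start, start + PAGE_SIZE);
};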
I'm having some problems with an async process in Node.js.
I'm getting some data from a remote JSON and adding it to my array. The JSON has some duplicated values, and I need to check whether a value already exists in my array before adding it, to avoid duplicated data.
My problem is that when I start the loop over the JSON values, the loop moves on to the next value before the previous one has finished being processed, so my array ends up filled with duplicated data instead of keeping only one item per type.
Look my current code:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
var i = 0,
self = this;
_.each(data, function(book) {
i++;
console.log('\n\n ------------------------------------------------------------ \n\n');
console.log('BOOK: ' + book.volumeInfo.title);
self.process_author(book, function() { console.log('in author'); });
console.log('\n\n ------------------------------------------------------------');
if(i == data.length) callback();
})
}
BookRegistration.prototype.process_author = function(book, callback) {
if(book.volumeInfo.authors) {
var author = { name: book.volumeInfo.authors[0].toLowerCase() };
if(!this.in_array(this.authors, author)) {
this.authors.push(author);
callback();
}
}
}
BookRegistration.prototype.in_array = function(list, obj) {
for(i in list) { if(list[i] === obj) return true; }
return false;
}
The result is:
[{name: author1 }, {name: author2}, {name: author1}]
And I need:
[{name: author1 }, {name: author2}]
UPDATED:
The solution suggested by @Zub works fine with plain arrays, but not with Sequelize and a MySQL database.
When I try to save my authors list to the database, the data is duplicated, because the system starts saving the next array element before it has finished saving the previous one.
What is the correct pattern in this case?
My code using the database is:
BookRegistration.prototype.process_author = function(book, callback) {
if(book.volumeInfo.authors) {
var author = { name: book.volumeInfo.authors[0].toLowerCase() };
var self = this;
models.Author.count({ where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(count) {
if(count < 1) {
models.Author.create(author).success(function(author) {
console.log('SALVANDO AUTHOR');
self.process_publisher({ book:book, author:author }, callback);
});
} else {
models.Author.find({where: { name: book.volumeInfo.authors[0].toLowerCase() }}).success(function(author) {
console.log('FIND AUTHOR');
self.process_publisher({ book:book, author:author }, callback);
});
}
});
// if(!this.in_array(this.authors, 'name', author)) {
// this.authors.push(author);
// console.log('AQUI NO AUTHOR');
// this.process_publisher(book, callback);
// }
}
}
How can I avoid data duplication in an async process?
This is because you are comparing different object references, so the result is always false.
Just as an experiment, type this in the console:
var obj1 = {a:1};
var obj2 = {a:1};
obj1 == obj2; //false
When comparing objects (as well as arrays), the result is only true when obj1 and obj2 reference the same object:
var obj1 = {a:1};
var obj2 = obj1;
obj1 == obj2; //true
Since you create a new author object in each process_author call, the comparison always returns false.
In your case the solution would be to compare the name property for each book:
BookRegistration.prototype.in_array = function(list, obj) {
for(i in list) { if(list[i].name === obj.name) return true; }
return false;
}
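The same check can also be written with Array.prototype.some, which stops at the first match (just an alternative, not a behaviour change):
BookRegistration.prototype.in_array = function(list, obj) {
    return list.some(function(item) {
        return item.name === obj.name;
    });
};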
EDIT (related to your comment question):
I would rewrite process_new_books_list method as follows:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
var i = 0,
self = this;
(function nextBook() {
var book = data[i];
if (!book) {
callback();
return;
}
self.process_author(book, function() {
i++;
nextBook();
});
})();
}
In this case the next process_author is called not immediately (as with _.each) but only after the previous callback has executed, so the operations run in sequence.
Not sure if this works though.
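If pulling in a dependency is acceptable, the async library's eachSeries gives the same one-book-at-a-time behaviour without hand-rolled recursion; a small sketch under that assumption:
var async = require('async');

BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var self = this;
    // eachSeries waits for each iterator's callback before moving on to the next book
    async.eachSeries(data, function(book, done) {
        self.process_author(book, done);
    }, callback);
};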
Sorry for my English, I'm not a native English speaker