Can I override a method defined in a node_modules file? - reactjs

I am using Syncfusion's React Schedule to build a scheduler application with Meteor/React.
In my Meteor application, the client/components folder contains a file 'schedule.js'.
It has the following piece of code:
function onEventRendered(args) {
    var categoryColor = args.data.Teacher;
    console.log(args.data);
    if (!args.element || !categoryColor) {
        return;
    }
    if (this.currentView === 'Agenda') {
        (args.element.firstChild).style.borderLeftColor = categoryColor;
    } else {
        args.element.style.backgroundColor = categoryColor;
    }
}
Whenever onEventRendered triggers, the Scheduler automatically calls one of the methods defined in node_modules/ej2-schedule/src/schedule/actions/crud.js:
Crud.prototype.addEvent = function (eventData) {
    var fields = this.parent.eventFields;
    var promise = null;
    var editParms = { addedRecords: [], changedRecords: [], deletedRecords: [] };
    var args = {
        cancel: false,
        data: (eventData instanceof Array) ? eventData : [eventData],
        requestType: 'eventCreate'
    };
    this.parent.trigger(events.actionBegin, args);
    if (args.cancel) {
        return;
    }
    if (eventData instanceof Array) {
        for (var _i = 0, _a = eventData; _i < _a.length; _i++) {
            var event_1 = _a[_i];
            this.processCrudTimezone(event_1);
            editParms.addedRecords.push(event_1);
        }
        promise =
            this.parent.dataModule.dataManager.saveChanges(editParms, fields.id, this.getTable(), this.getQuery());
    }
    else {
        this.processCrudTimezone(eventData);
        promise = this.parent.dataModule.dataManager.insert(eventData, this.getTable(), this.getQuery());
    }
    var crudArgs = { requestType: 'eventCreated', cancel: false, data: eventData, promise: promise };
    this.refreshData(crudArgs);
};
I just want to add a line to this method that calls a Meteor method 'event.add', so that the data can be saved in the database. How can this be achieved?

We can perform CRUD with MongoDB at the application level without modifying any source file in node_modules. We have prepared a sample for your reference, which can be downloaded from the location below.
http://www.syncfusion.com/downloads/support/directtrac/general/ze/Sample1414642222
In the above sample, we have added the CRUD-action code in server.js:
app.post("/GetData", (req, res) => { //executes on initial loading and for each CRUD actions
dbo.collection('ScheduleData').find({}).toArray((err, cus) => {
res.send(cus);
});
});
app.post("/BatchData", (req, res) => {
var eventData = [];
if (req.body.action == "insert" || (req.body.action == "batch" && req.body.added.length > 0)) { //this block will execute while adding events
(req.body.action == "insert") ? eventData.push(req.body.value) : eventData = req.body.added;
for (var i = 0; i < eventData.length; i++) {
var sdate = new Date(eventData[i].StartTime);
var edate = new Date(eventData[i].EndTime);
eventData[i].StartTime = (new Date(+sdate - (sdate.getTimezoneOffset() * 60000)));
eventData[i].EndTime = (new Date(+edate - (edate.getTimezoneOffset() * 60000)));
dbo.collection('ScheduleData').insertOne(eventData[i]); //to add the events in MongoDB collection
}
}
if (req.body.action == "update" || (req.body.action == "batch" && req.body.changed.length > 0)) { //this block will execute while editing events
(req.body.action == "update") ? eventData.push(req.body.value) : eventData = req.body.changed;
for (var i = 0; i < eventData.length; i++) {
delete eventData[i]._id;
var sdate = new Date(eventData[i].StartTime);
var edate = new Date(eventData[i].EndTime);
eventData[i].StartTime = (new Date(+sdate - (sdate.getTimezoneOffset() * 60000)));
eventData[i].EndTime = (new Date(+edate - (edate.getTimezoneOffset() * 60000)));
dbo.collection('ScheduleData').updateOne({ "Id": eventData[i].Id }, eventData[i]); //to update the events in MongoDB collection
}
}
if (req.body.action == "remove" || (req.body.action == "batch" && req.body.deleted.length > 0)) { //this block will execute while deleting events
(req.body.action == "remove") ? eventData.push(req.body.value) : eventData = req.body.deleted;
for (var i = 0; i < eventData.length; i++) {
dbo.collection('ScheduleData').deleteOne({ "Id": eventData[i].Id }, eventData[i]); //to delete the events in MongoDB collection
}
}
res.send(req.body);
});
In the code below, the GetData and BatchData URL paths are given to a DataManager (for initial fetching and for CRUD actions, via the UrlAdaptor), which is then assigned as the Scheduler's data source.
let data = new DataManager({
    url: 'http://localhost:5000/GetData',
    crudUrl: 'http://localhost:5000/BatchData',
    adaptor: new UrlAdaptor(),
    crossDomain: true
});
eventSettings={{ dataSource: data }}
Steps to run the sample:
Run MongoDB and create a collection named 'ScheduleData' in the 'mydb' database.
Run the following commands:
npm install
npm run server
npm start
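If you would rather keep everything inside Meteor instead of running a separate Express server, here is a minimal sketch of an alternative: handle the Scheduler's actionBegin event, which the addEvent source above triggers before saving, and call the Meteor method from there. This assumes a server-side Meteor method named 'event.add' already exists, as in the question.

import { Meteor } from 'meteor/meteor';

function onActionBegin(args) {
    if (args.requestType === 'eventCreate') {
        // addEvent normalizes args.data to an array (see the crud.js source above)
        args.data.forEach(function(event) {
            Meteor.call('event.add', event, function(err) {
                if (err) console.error(err);
            });
        });
    }
}

Wiring this up as <ScheduleComponent actionBegin={onActionBegin} ...> keeps node_modules untouched, since the Scheduler raises actionBegin for every eventCreate request.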

contentVersion (image/png) created is empty

// Lightning controller: here I am capturing the signature in a hidden canvas, extracting the base64 data from it, and sending it to the server-side Apex
var tCtx = document.getElementById('textCanvas').getContext('2d'),
    imageElem = document.getElementById('Signimage');
tCtx.canvas.width = 720;
tCtx.canvas.height = 100;
tCtx.font = "italic 30px monospace";
var theSignature = n; // name of person - the text that is to be converted to an image
tCtx.fillText(theSignature, 10, 50);
imageElem.src = tCtx.canvas.toDataURL();
var base64Canvas = tCtx.canvas.toDataURL().split(';base64,')[1];
component.set('{!v.storeApplicantSign}', base64Canvas);
// Lightning helper
uploadonSubmit: function(component, event, helper) {
    // call the Apex method 'saveChunk'
    var action = component.get("c.saveChunk");
    action.setParams({
        parentId: component.get("v.recordId"),
        base64Data: component.get("v.storeApplicantSign") // contains the base64 data
    });
    // set callback
    action.setCallback(this, function(response) {
        // store the response / Attachment Id
        var result = response.getReturnValue();
        var state = response.getState();
        if (state === "SUCCESS") {
            alert("Success");
            // this.showtheToast();
        } else if (state === "INCOMPLETE") {
            alert("From server: " + response.getReturnValue());
        } else if (state === "ERROR") {
            var errors = response.getError();
            if (errors) {
                if (errors[0] && errors[0].message) {
                    console.log("Error message: " + errors[0].message);
                }
            } else {
                console.log("Unknown error");
            }
        }
    });
    // enqueue the action
    $A.enqueueAction(action);
},
// Apex class
// Here we decode the data from Lightning and create the ContentVersion
@AuraEnabled
public static Id saveChunk(Id parentId, String base64Data) {
    String fileId = saveTheFile(parentId, base64Data, 'Signature.png');
    return Id.valueOf(fileId);
}
public static Id saveTheFile(Id parentId, String base64Data, String fileName) {
    base64Data = EncodingUtil.urlDecode(base64Data, 'UTF-8');
    ContentVersion contentVersion = new ContentVersion(
        versionData = EncodingUtil.base64Decode(base64Data),
        title = fileName,
        pathOnClient = 'Signature' + '.' + 'png',
        ContentLocation = 'S',
        FirstPublishLocationId = parentId);
    system.debug('contentversion data => ' + contentVersion + ' version data ----> ' + contentVersion.VersionData);
    insert contentVersion;
    return contentVersion.Id;
}
// The file is created but it's empty: the image is not there, and the file can't be opened as an image
The issue was on the Apex side. Removing this line solved it:
base64Data = EncodingUtil.urlDecode(base64Data, 'UTF-8'); // this wasn't required
The output of toDataURL() is plain base64, not URL-encoded, so urlDecode turns every '+' in the string into a space and corrupts the bytes that base64Decode produces, which is why the file was created but couldn't be opened as an image.
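A quick way to see that corruption in plain JavaScript (mirroring what urlDecode does to the payload):
var b64 = btoa('\xfb\xef');            // "++8=" (the base64 alphabet includes '+')
var mangled = b64.replace(/\+/g, ' '); // "  8=" (URL-decoding treats '+' as an encoded space)
// atob(mangled) now throws instead of returning the original bytes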

Angular nested Promise shows an error

I have 2 API calls.
The second API call depends on the property IDs returned by the first: for each property, it checks whether that property has parking.
If it does, I add the details of that property to an object and push the object into an array.
The second API call is nested inside the first. After I've looped through all the properties, I check whether the array length is more than 0; if it is, I display the returned properties on the page, otherwise I show an error.
The problem is that even when properties with parking are returned, the else statement (the error function) executes as well as the properties being displayed on the page.
Is there a way to complete the nested promises before checking whether my array has more than 0 items?
Here's my code:
$scope.viewPropertyList = function(latlong) {
    $scope.locationError = false;
    var latlongArray = latlong.split('::');
    var searchLat_scope = latlongArray[0];
    var searchLon_scope = latlongArray[1];
    if (searchLat_scope && searchLon_scope) {
        var data = Property.getAllProperties({
            dest: 'property',
            apikey: API_KEY,
            lat: encodeURIComponent(searchLat_scope),
            limit: 10,
            lon: encodeURIComponent(searchLon_scope)
        }).$promise.then(function(success) {
            var propertyMarkers = [];
            $scope.dbMarkers = 0;
            for (var i = 0, l = success.property.length; i < l; i++) {
                (function(i) {
                    Property.getProperty({
                        dest: 'property',
                        propertyId: success.property[i].name,
                        apikey: API_KEY
                    }).$promise.then(function(propertyData) {
                        for (var j = 0, k = propertyData.services.length; j < k; j++) {
                            if (propertyData.services[j].name === "parking") {
                                var obj = {
                                    "propertyName": success.property[i].propertyName,
                                    "telephone": success.property[i].telephone,
                                    "postcode": success.property[i].address.postcode,
                                    "city": success.property[i].address.city,
                                    "county": success.property[i].address.county,
                                    "addressLine1": success.property[i].address.addressLine1
                                };
                                propertyMarkers.push(obj);
                            }
                        }
                        if (propertyMarkers.length != 0) {
                            $scope.dbMarkers = propertyMarkers;
                            $scope.selectedLat = searchLat_scope;
                            $scope.selectedlog = searchLon_scope;
                        } else {
                            $scope.locationErr = true;
                            $scope.errorMsg = "No properties found";
                        }
                    });
                })(i);
            }
        }, function(error) {
            $scope.locationErr = true;
            $scope.errorMsg = "Something went wrong, please try again";
        });
    }
}
Two main things:
there's no attempt to aggregate the multiple promises generated in the loop;
the if (propertyMarkers.length > 0) {...} else {...} check is too deeply nested.
Minor:
the inner iteration can break as soon as 'parking' is found; if it continued and a further 'parking' entry was found, duplicate markers would be created.
$scope.viewPropertyList = function(latlong) {
    $scope.locationError = false;
    var latlongArray = latlong.split('::');
    var searchLat_scope = latlongArray[0];
    var searchLon_scope = latlongArray[1];
    if (searchLat_scope && searchLon_scope) {
        Property.getAllProperties({
            dest: 'property',
            apikey: API_KEY,
            limit: 10,
            lat: encodeURIComponent(searchLat_scope),
            lon: encodeURIComponent(searchLon_scope)
        }).$promise.then(function(success) {
            var propertyMarkers = [];
            $scope.dbMarkers = 0;
            // create an array of promises by mapping the array `success.property`.
            var promises = success.property.map(function(prop) {
                return Property.getProperty({
                    dest: 'property',
                    propertyId: prop.name,
                    apikey: API_KEY
                }).$promise.then(function(propertyData) {
                    for (var j = 0, k = propertyData.services.length; j < k; j++) {
                        if (propertyData.services[j].name === 'parking') {
                            propertyMarkers.push({
                                'propertyName': prop.propertyName,
                                'telephone': prop.telephone,
                                'postcode': prop.address.postcode,
                                'city': prop.address.city,
                                'county': prop.address.county,
                                'addressLine1': prop.address.addressLine1
                            });
                            break; // 'parking' is found - no point iterating further
                        }
                    }
                });
            });
            /* ******** */
            // Aggregate `promises`
            $q.all(promises).then(function() {
                // This block is now un-nested from its original position,
                // and will execute when all `promises` have resolved.
                if (propertyMarkers.length > 0) {
                    $scope.dbMarkers = propertyMarkers;
                    $scope.selectedLat = searchLat_scope;
                    $scope.selectedlog = searchLon_scope;
                } else {
                    $scope.locationErr = true;
                    $scope.errorMsg = 'No parking found';
                }
            });
            /* ******** */
        }).catch(function(error) {
            $scope.locationErr = true;
            $scope.errorMsg = 'Something went wrong, please try again';
        });
    } else {
        $scope.locationErr = true;
        $scope.errorMsg = 'Problem with lat/lng data';
    }
}
Notes:
The outer iteration is now coded as success.property.map(), which returns promises and avoids the need for an IIFE.
Extra error handling has been added.
If I got your problem right, you want all the Property.getProperty promises for success.property to be resolved before going into the success function that checks the propertyMarkers length.
In that case, you need $q.all to resolve all the Property.getProperty promises for you.
Inside your for (var i = 0, l = success.property.length; i < l; i++) { loop, append each of the promises to an array:
Property.getProperty({
    dest: 'property',
    propertyId: success.property[i].name,
    apikey: API_KEY
})
then use $q.all(arrPromises).then(function(propertyData) { to do the check afterwards.
One more thing worth mentioning: promise chaining can easily be achieved with $promise.then(successFn, failFn).then(successFn, failFn).then(...). Every call to then() creates another promise, which allows you to chain to the next step and pass a value along.
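A minimal sketch of that shape (arrPromises is a placeholder name, and $q must be injected into the controller):
var arrPromises = [];
for (var i = 0, l = success.property.length; i < l; i++) {
    arrPromises.push(Property.getProperty({
        dest: 'property',
        propertyId: success.property[i].name,
        apikey: API_KEY
    }).$promise);
}
$q.all(arrPromises).then(function(allPropertyData) {
    // allPropertyData[i] corresponds to success.property[i];
    // only here is it safe to check propertyMarkers.length
});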

How to write more than 25 items/rows into Table for DynamoDB?

I am quite new to Amazon DynamoDB. I currently have 20000 rows that I need to add to a table. However, based on what I've read, it seems that I can only write up to 25 rows at a time using the BatchWriteItem class with 25 WriteRequests. Is it possible to increase this? How can I write more than 25 rows at a time? It is currently taking about 15 minutes to write all 20000 rows. Thank you.
You can only send up to 25 items in a single BatchWriteItem request, but you can send as many BatchWriteItem requests as you want at one time. Assuming you've provisioned enough write throughput, you should be able to speed things up significantly by splitting those 20k rows between multiple threads/processes/hosts and pushing them to the database in parallel.
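One caveat worth adding: even a successful BatchWriteItem call can return part of the batch in UnprocessedItems when throughput is exceeded, and those items must be re-sent or they are silently lost. A minimal sketch with the JavaScript SDK (the function name is mine; in production you would also add exponential backoff between retries):

var AWS = require('aws-sdk');
var docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

// Sends one batch and keeps re-sending whatever comes back unprocessed.
async function batchWriteWithRetry(params) {
    var response = await docClient.batchWrite(params).promise();
    while (response.UnprocessedItems && Object.keys(response.UnprocessedItems).length > 0) {
        response = await docClient.batchWrite({ RequestItems: response.UnprocessedItems }).promise();
    }
}

Any of the chunking loops in the answers below could call this instead of calling docClient.batchWrite directly.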
It's maybe a bit heavyweight for that small of a dataset, but you can use AWS Data Pipeline to ingest data from S3. It basically automates the process of creating a Hadoop cluster to suck down your data from S3 and send it to DynamoDB in a bunch of parallel BatchWriteItem requests.
I was looking for some code to do this with the JavaScript SDK. I couldn't find it, so I put it together myself. I hope this helps someone else!
function multiWrite(table, data, cb) {
    var AWS = require('aws-sdk');
    var db = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});
    // Build the batches
    var batches = [];
    var current_batch = [];
    var item_count = 0;
    for (var x in data) {
        // Add the item to the current batch
        item_count++;
        current_batch.push({
            PutRequest: {
                Item: data[x]
            }
        });
        // If we've added 25 items, add the current batch to the batches array
        // and reset it
        if (item_count % 25 == 0) {
            batches.push(current_batch);
            current_batch = [];
        }
    }
    // Add the last batch if it has records and is not equal to 25
    if (current_batch.length > 0 && current_batch.length != 25) batches.push(current_batch);
    // Handler for the database operations
    var completed_requests = 0;
    var errors = false;
    function handler(request) {
        return function(err, data) {
            // Increment the completed requests
            completed_requests++;
            // Set the errors flag
            errors = (errors) ? true : err;
            // Log the error if we got one
            if (err) {
                console.error(JSON.stringify(err, null, 2));
                console.error("Request that caused database error:");
                console.error(JSON.stringify(request, null, 2));
            }
            // Make the callback if we've completed all the requests
            if (completed_requests == batches.length) {
                cb(errors);
            }
        };
    }
    // Make the requests
    var params;
    for (x in batches) {
        // Items go in the params.RequestItems.id array
        // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
        params = '{"RequestItems": {"' + table + '": []}}';
        params = JSON.parse(params);
        params.RequestItems[table] = batches[x];
        // Perform the batchWrite operation
        db.batchWrite(params, handler(params));
    }
}
function putInHistory(data, cb) {
    var arrayOfArray25 = _.chunk(data, 25);
    async.every(arrayOfArray25, function(arrayOf25, callback) {
        var params = {
            RequestItems: {
                [TABLES.historyTable]: []
            }
        };
        arrayOf25.forEach(function(item) {
            params.RequestItems[TABLES.historyTable].push({
                PutRequest: {
                    Item: item
                }
            });
        });
        docClient.batchWrite(params, function(err, data) {
            if (err) {
                console.log(err);
                callback(err);
            } else {
                console.log(data);
                callback(null, true);
            }
        });
    }, function(err, result) {
        if (err) {
            cb(err);
        } else {
            if (result) {
                cb(null, { allWritten: true });
            } else {
                cb(null, { allWritten: false });
            }
        }
    });
}
You can use lodash to split the array into chunks of 25 elements and then use the async library's each/every method to do a batchWrite on each chunk, as in the code above.
Using the AWS CLI and aws-vault, this is what I do.
Let's imagine you have the following file (data.json) with 1000 rows:
{ "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}},
... to 1000
and you need to split it into chunk files with 25 rows in each!
I use the following C# code in LINQPad to generate the .sh file and the JSON chunks, so they can be inserted into DynamoDB using the AWS CLI:
void Main()
{
    var sourcePath = @"D:\data\whereYourMainJsonFileIsLocated\";
    var sourceFilePath = @"data.json";
    var awsVaultProfileName = "dev";
    var env = "dev";
    var tableName = "dynamodb-table-name";
    var lines = System.IO.File.ReadAllLines(sourcePath + sourceFilePath);
    var destinationPath = Path.Combine(sourcePath, env);
    var destinationChunkPath = Path.Combine(sourcePath, env, "chunks");
    if (!System.IO.Directory.Exists(destinationChunkPath))
        System.IO.Directory.CreateDirectory(destinationChunkPath);
    System.Text.StringBuilder shString = new System.Text.StringBuilder();
    for (int i = 0; i < lines.Count(); i = i + 25)
    {
        var pagedLines = lines.Skip(i).Take(25).ToList().Distinct().ToList();
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        sb.AppendLine("{");
        sb.AppendLine($" \"{tableName}\": [");
        foreach (var element in pagedLines)
        {
            if (element == pagedLines.Last())
                sb.AppendLine(element.Substring(0, element.Length - 1));
            else
                sb.AppendLine(element);
        }
        sb.AppendLine("]");
        sb.AppendLine("}");
        var fileName = $"chunk{i / 25}.json";
        System.IO.File.WriteAllText(Path.Combine(destinationChunkPath, fileName), sb.ToString(), Encoding.Default);
        shString.AppendLine($@"aws-vault.exe exec {awsVaultProfileName} -- aws dynamodb batch-write-item --request-items file://chunks/{fileName}");
    }
    System.IO.File.WriteAllText(Path.Combine(destinationPath, $"{tableName}-{env}.sh"), shString.ToString(), Encoding.Default);
}
The result will be chunk files such as chunk0.json, chunk1.json, etc.:
{
"dynamodb-table-name": [
{ "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}}
]
}
and a .sh file:
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk0.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk1.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk2.json
Finally, just run the .sh file and you'll have all the data in your table!
Based on the answer from @Geerek, here is the solution as a Lambda function:
exports.handler = (event, context, callback) => {
    console.log(`EVENT: ${JSON.stringify(event)}`);
    var AWS = require('aws-sdk');
    AWS.config.update({ region: process.env.REGION });
    var docClient = new AWS.DynamoDB.DocumentClient();
    const { data, table, cb } = event;
    // Build the batches
    var batches = [];
    var current_batch = [];
    var item_count = 0;
    for (var i = 0; i < data.length; i++) {
        // Add the item to the current batch
        item_count++;
        current_batch.push({
            PutRequest: {
                Item: data[i],
            },
        });
        // If we've added 25 items, add the current batch to the batches array
        // and reset it
        if (item_count % 25 === 0) {
            batches.push(current_batch);
            current_batch = [];
        }
    }
    // Add the last batch if it has records and is not equal to 25
    if (current_batch.length > 0 && current_batch.length !== 25) {
        batches.push(current_batch);
    }
    // Handler for the database operations
    var completed_requests = 0;
    var errors = false;
    function handler(request) {
        console.log('in the handler: ', request);
        return function(err, data) {
            // Increment the completed requests
            completed_requests++;
            // Set the errors flag
            errors = (errors) ? true : err;
            // Log the error if we got one
            if (err) {
                console.error(JSON.stringify(err, null, 2));
                console.error("Request that caused database error:");
                console.error(JSON.stringify(request, null, 2));
                callback(err);
            } else {
                callback(null, data);
            }
            // Make the callback if we've completed all the requests
            if (completed_requests === batches.length) {
                cb(errors);
            }
        };
    }
    // Make the requests
    var params;
    for (var j = 0; j < batches.length; j++) {
        // Items go in the params.RequestItems.id array
        // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
        params = '{"RequestItems": {"' + table + '": []}}';
        params = JSON.parse(params);
        params.RequestItems[table] = batches[j];
        console.log('before db.batchWrite: ', params);
        // Perform the batchWrite operation
        docClient.batchWrite(params, handler(params));
    }
};
I wrote an npm package that should work as a simple drop-in replacement for the batchWrite method; you just need to pass the DynamoDB instance as the first parameter and things should work:
https://www.npmjs.com/package/batch-write-all
Check the example in the project readme file:
// Use the call below instead of: dynamodb.batchWrite(params).promise();
batchWriteAll(dynamodb, params).promise();
const { dynamoClient } = require("./resources/db");
const { v4: uuid } = require("uuid");

const batchWriteLooper = async () => {
    let array = [];
    for (let i = 0; i < 2000; i++) {
        array.push({
            PutRequest: {
                Item: {
                    personId: uuid(),
                    name: `Person ${i}`,
                    age: Math.floor(Math.random() * 100),
                    gender: "Male",
                    createdAt: new Date(),
                    updatedAt: new Date(),
                },
            },
        });
    }
    var perChunk = 20; // items per chunk
    var result = array.reduce((resultArray, item, index) => {
        const chunkIndex = Math.floor(index / perChunk);
        if (!resultArray[chunkIndex]) {
            resultArray[chunkIndex] = []; // start a new chunk
        }
        resultArray[chunkIndex].push(item);
        return resultArray;
    }, []);
    Promise.all(
        result.map(async (chunk) => {
            const params = {
                RequestItems: {
                    "persons": chunk,
                },
            };
            return await dynamoClient.batchWrite(params).promise();
        })
    ).then(() => {
        console.log("done");
    });
};
batchWriteLooper();

Angular's equivalent of ko.utils.arrayMap, or adding extra properties to a returned data array?

I'm in the process of converting a Knockout app to Angular. I currently get an array of objects from the server, but I would like to extend each object by adding some extra properties.
In Knockout I would do the following:
var mappedResults = ko.utils.arrayMap(results, function(item) {
    item.selected = ko.observable(true);
    item.viewPreview = ko.observable(false);
    return new reed.search.Candidate(item, self.viewModel.fileDownloadFailCookieName);
});
and the Candidate viewmodel:
reed.search.Candidate = function(data, fileDownloadFailCookieName) {
    debugger;
    if (data == null) {
        throw 'Error: cannot initiate candidate';
    }
    this.fileDownloadFailCookieName = fileDownloadFailCookieName;
    this.candidateId = data.CandidateId;
    this.name = data.Name;
    this.surname = data.Surname;
    this.forename = data.Forename;
    this.displayLocation = data.DisplayLocation;
    this.lastJobDetails = data.LastJobDetails;
    this.displayPayRate = data.DisplayPayRate;
    this.lastSignIn = data.LastSignIn;
    this.downloadCVUrl = data.DownloadCVUrl;
    this.additionalInfo = data.AdditionalInfo;
    this.isAvailable = (data.IsAvailable) ? "Availability confirmed" : "";
    this.availableMornings = data.AvailableMornings;
    this.availableAfternoons = data.AvailableAfternoons;
    this.availableEvenings = data.AvailableEvenings;
    this.availableWeekends = data.AvailableWeekends;
    this.availableShiftWork = data.AvailableShiftWork;
    this.availableNights = data.AvailableNights;
    this.availabilityUpdatedOn = data.AvailabilityUpdatedOn;
    this.availabilityUpdatedOnDate = "| <strong>Availability updated</strong> " + data.AvailabilityUpdatedOn;
    this.isAvailableForSomething =
        this.availableMornings
        || this.availableAfternoons
        || this.availableEvenings
        || this.availableWeekends
        || this.availableShiftWork
        || this.availableNights;
    this.viewPreview = ko.observable(false);
    this.selected = ko.observable(false);
    this.hasBeenNotified = ko.observable(false);
    this.select = function() {
        this.selected(true);
    };
    this.deSelect = function() {
        this.selected(false);
    };
    this.HasFlagSet = function(availability) {
        return availability ? "availabilitySelected" : "availabilityNotSelected";
    };
    this.ajaxCvDownload = function() {
        var path = window.location.href,
            iframeError,
            cookieName = this.fileDownloadFailCookieName;
        // download path
        path = path.match(/(.+\/)/ig)[0];
        if (path.match(/home/ig)) {
            path = path.replace('home', this.downloadCVUrl);
        } else {
            path = this.downloadCVUrl;
        }
        $('<iframe />').attr('src', path)
            .hide()
            .appendTo('body').load(function() {
                var message = decodeURIComponent(reed.shared.utils.getCookie(cookieName));
                message = message.replace(/\+/g, " ");
                if (message.length > 0 && message != "null") {
                    reed.shared.utils.showMessage(message, "Download Failed");
                }
            });
    };
};
How can I achieve the same functionality in Angular?
You don't need Angular for this: arrays themselves have a map function, and all modern browsers support it.
var mappedResults = results.map(function(item) {
    item.selected = true;
    item.viewPreview = false;
    return new reed.search.Candidate(item,
        self.viewModel.fileDownloadFailCookieName);
});
Some other things you can improve. Firstly, if you are using Web API to return the data, use a formatter that fixes the casing. Check this blog: http://blogs.msmvps.com/theproblemsolver/2014/03/26/webapi-pascalcase-and-camelcase/
Once you have the formatter, lines such as this one are no longer required:
this.surname = data.Surname;
You can then use angular.extend to copy the properties onto your class.
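A minimal sketch of that idea (assuming the formatter is in place, so the server returns camelCased property names that already match what the view uses):

reed.search.Candidate = function(data, fileDownloadFailCookieName) {
    // copies data.surname, data.forename, etc. straight onto the instance
    angular.extend(this, data);
    this.fileDownloadFailCookieName = fileDownloadFailCookieName;
    this.selected = false;
    this.viewPreview = false;
};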

Nodejs async data duplication

I'm having some problems with an async process in Node.js.
I'm getting some data from a remote JSON and adding it to my array. The JSON has some duplicated values, and I need to check whether each one already exists in my array before adding it, to avoid data duplication.
My problem is that when I start the loop over the JSON values, the loop calls the next value before the previous one has finished processing, so my array is filled with duplicated data instead of keeping only one item per type.
Here is my current code:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var i = 0,
        self = this;
    _.each(data, function(book) {
        i++;
        console.log('\n\n ------------------------------------------------------------ \n\n');
        console.log('BOOK: ' + book.volumeInfo.title);
        self.process_author(book, function() { console.log('in author'); });
        console.log('\n\n ------------------------------------------------------------');
        if (i == data.length) callback();
    });
};
BookRegistration.prototype.process_author = function(book, callback) {
    if (book.volumeInfo.authors) {
        var author = { name: book.volumeInfo.authors[0].toLowerCase() };
        if (!this.in_array(this.authors, author)) {
            this.authors.push(author);
            callback();
        }
    }
};
BookRegistration.prototype.in_array = function(list, obj) {
    for (var i in list) { if (list[i] === obj) return true; }
    return false;
};
The result is:
[{name: author1 }, {name: author2}, {name: author1}]
And I need:
[{name: author1 }, {name: author2}]
UPDATED:
The solution suggested by @Zub works fine with plain arrays, but not with Sequelize and a MySQL database.
When I try to save my authors list to the database, the data is duplicated, because the system starts saving another array element before it has finished saving the previous one.
What is the correct pattern in this case?
My code using the database is:
BookRegistration.prototype.process_author = function(book, callback) {
    if (book.volumeInfo.authors) {
        var author = { name: book.volumeInfo.authors[0].toLowerCase() };
        var self = this;
        models.Author.count({ where: { name: book.volumeInfo.authors[0].toLowerCase() } }).success(function(count) {
            if (count < 1) {
                models.Author.create(author).success(function(author) {
                    console.log('SAVING AUTHOR');
                    self.process_publisher({ book: book, author: author }, callback);
                });
            } else {
                models.Author.find({ where: { name: book.volumeInfo.authors[0].toLowerCase() } }).success(function(author) {
                    console.log('FIND AUTHOR');
                    self.process_publisher({ book: book, author: author }, callback);
                });
            }
        });
        // if (!this.in_array(this.authors, 'name', author)) {
        //     this.authors.push(author);
        //     console.log('HERE IN AUTHOR');
        //     this.process_publisher(book, callback);
        // }
    }
};
How can I avoid data duplication in an async process?
This is because you are comparing different objects, and the result is always false.
Just as an experiment, type this in the console:
var obj1 = {a:1};
var obj2 = {a:1};
obj1 == obj2; //false
Comparing objects (as well as arrays) only results in true when obj1 and obj2 reference the same object:
var obj1 = {a:1};
var obj2 = obj1;
obj1 == obj2; //true
Since you create a new author object in each process_author call, you always get false when comparing.
In your case the solution would be to compare the name property for each book:
BookRegistration.prototype.in_array = function(list, obj) {
    for (var i in list) { if (list[i].name === obj.name) return true; }
    return false;
};
EDIT (related to your comment question):
I would rewrite the process_new_books_list method as follows:
BookRegistration.prototype.process_new_books_list = function(data, callback) {
    var i = 0,
        self = this;
    (function nextBook() {
        var book = data[i];
        if (!book) {
            callback();
            return;
        }
        self.process_author(book, function() {
            i++;
            nextBook();
        });
    })();
};
In this case the next process_author call is made not immediately (as with _.each), but only after the previous callback has executed, so your program runs in sequence.
Not sure whether this works though.
Sorry for my English, I'm not a native English speaker.
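For the Sequelize part of the question, one more option worth mentioning (an assumption on my part; check the API of the Sequelize version you are using, since the .success() style above comes from the old 1.x callbacks): findOrCreate folds the count-then-create steps into a single call, which, combined with the serialized nextBook loop above, removes the window in which a duplicate author could be inserted:

BookRegistration.prototype.process_author = function(book, callback) {
    if (book.volumeInfo.authors) {
        var name = book.volumeInfo.authors[0].toLowerCase();
        var self = this;
        // find-or-create in one call instead of count + create/find
        models.Author.findOrCreate({ name: name }).success(function(author) {
            self.process_publisher({ book: book, author: author }, callback);
        });
    }
};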
