Angular nested loop promises - angularjs

I'm trying to build a Cordova/angular app with a SQLite plugin to make it work offline but I'm having a lot of trouble with the promise and asynchronous of the query to the database this is my situation:
I have 5 tables configured like this:
Table Pages with id, title, template_id and body
Table Menu with id and tag
Table PageMenus with id and page_id and menu_id to associate page and menu
Table MenuItems with id, menu_id and body with the "actual" element in each menu
Table template with id and tag to select the right view
For compatibility reasons (I'm using the same code for the webapp and the mobile app; for the webapp I call my API, while on mobile I download all the content to the device) I need to retrieve the pages in this format:
{
"id": 1
"body": "Welcome to the homepage",
"title": "Homepage",
"template_tag": "tab",
"menus": [
{
"id": 3,
"tag": "home_menu",
"menu_items": [
{
"menu_id": 3,
"body": "Movie"
},
{
"menu_id": 3,
"body": "Restaurant"
},
{
"menu_id": 3,
"body": "Messages"
},
{
"menu_id": 3,
"body": "Systems"
}
]
},
{
"id": 62,
"tag": "user_menu",
"menu_items": [
{
"menu_id": 62,
"body": "About"
},
{
"menu_id": 62,
"body": "Updates"
},
{
"menu_id": 62,
"body": "Help"
},
{
"menu_id": 62,
"body": "Reset Password"
},
{
"menu_id": 62,
"body": "Report/ Feedback"
}
]
}
]
}
I'm already able to get the right format, but my problem is that the controller tries to access the body of the menu before it is resolved, so I get an undefined error. This is the code I'm using in my factory at the moment:
return {
getHomePage: function() {
// other function
},
getPage: function(id) {
var results = $q.defer();
function request() {
var res = {};
var queryPage = "SELECT pages.id, pages.body, pages.title, templates.tag AS template_tag FROM pages JOIN templates ON pages.template_id = templates.id WHERE pages.id = ?";
$cordovaSQLite.execute(db, queryPage, [id]).then(function(page) {
res = page.rows.item(0);
res.menus = [];
var queryMenus = "SELECT menus.id, menus.tag FROM menus JOIN page_menus ON menus.id = page_menus.menu_id WHERE page_menus.page_id = ?";
$cordovaSQLite.execute(db, queryMenus, [res.id]).then(function(menus) {
for (var i = 0; i < menus.rows.length; i++) {
var menu = {
id: menus.rows.item(i).id,
tag: menus.rows.item(i).tag,
menu_items: []
};
var queryMenuItems = "SELECT * FROM menu_items JOIN menus ON menu_items.menu_id = menus.id where menus.id = ?"
$cordovaSQLite.execute(db, queryMenuItems, [menus.rows.item(i).id]).then(function(menu_items) {
for (var i = 0; i < menu_items.rows.length; i++) {
menu.menu_items.push(menu_items.rows.item(i));
}
});
res.menus.push(menu);
};
results.resolve(res);
});
});
};
request();
return results.promise;
},
getMedia: function(id) {
// other function
}
};

It's a good practice to chain promises the way:
// Illustrative pattern (pseudocode): returning the NEXT promise from each
// .then keeps the chain flat, so errors and sequencing stay in one place
// instead of nesting callbacks.
getSomething: function(...) {
return requestReturningPromise1.then(function(resultOfPromise1) {
// Do something here (prepare next request,...)
return requestReturningPromise2;
}).then(function(resultOfPromise2) {
// Do something here (prepare next request,...)
return requestReturningPromise3;
}).then(function(resultOfPromise3) {
// Do something here (prepare next request,...)
return finalReturn;
});
}
Nesting is reduced, it's more readable and easy to debug.
So applying it to your code gives something like this:
getPage: function(id) {
var res = {};
var queryPage = "SELECT pages.id, pages.body, pages.title, templates.tag AS template_tag FROM pages JOIN templates ON pages.template_id = templates.id WHERE pages.id = ?";
return $cordovaSQLite.execute(db, queryPage, [id]).then(function(page) {
res = page.rows.item(0);
res.menus = [];
var queryMenus = "SELECT menus.id, menus.tag FROM menus JOIN page_menus ON menus.id = page_menus.menu_id WHERE page_menus.page_id = ?";
return $cordovaSQLite.execute(db, queryMenus, [res.id]);
}).then(function(menus) {
var menuPromises = [];
for (var i = 0; i < menus.rows.length; i++) {
var menu = {
id: menus.rows.item(i).id,
tag: menus.rows.item(i).tag,
menu_items: []
};
var queryMenuItems = "SELECT * FROM menu_items JOIN menus ON menu_items.menu_id = menus.id where menus.id = ?";
var menuPromise = $cordovaSQLite.execute(db, queryMenuItems, [menus.rows.item(i).id]).then(function(menu_items) {
for (var i = 0; i < menu_items.rows.length; i++) {
menu.menu_items.push(menu_items.rows.item(i));
}
return menu;
});
menuPromises.push(menuPromise);
}
return Promise.all(menuPromises);
}).then(function(menus) {
for (var i = 0; i < menus.length; i++) {
res.menus.push(menus[i]);
}
return res;
});
}
Note that in the code above, the service itself returns a promise so you have to consume it like this in your controller:
// Consume the service's promise: the page is only available inside .then.
MyService.getPage(id).then(function(page) {
// here, bind the result page to your controller scope ...
});

This is what I ended up doing (I post it even though I don't think it will be really helpful for someone else, because it is heavily based on the structure of my DB):
// Fire all three queries in parallel instead of chaining them sequentially.
var promisePage = $cordovaSQLite.execute(db, "SELECT p.*, t.tag AS template_tag FROM pages AS p JOIN templates AS t ON p.template_id = t.id WHERE p.id = ?", [id]);
var promiseMenus = $cordovaSQLite.execute(db, "SELECT m.* FROM menus AS m JOIN page_menus AS pm ON m.id = pm.menu_id WHERE pm.page_id = ?", [id]);
var promiseMenuItems = $cordovaSQLite.execute(db, "SELECT mi.* FROM menu_items AS mi JOIN menus AS m ON mi.menu_id = m.id JOIN page_menus AS pm ON pm.menu_id = m.id WHERE pm.page_id = ?", [id]);
// $q.all resolves once all three result sets are available; data[0..2]
// match the order of the promise array above.
return $q.all([promisePage, promiseMenus, promiseMenuItems]).then(function(data) {
var page = data[0].rows.item(0);
var menus = data[1];
var menuItems = data[2];
// here there is some boring code to construct the page
return page;
// NOTE(review): the closing "});" of this .then block was dropped from the post.
Simply, instead of querying the DB for the menus and menu items after I got the page, I query all three elements in parallel and then do all the work in a $q.all.

Related

Want to loop through a json array after selecting a dropdown box list

I want to be able to select a list of options from a dropdown box with "Station" values taken from json array -"smallData.json" (which I am able to do and it is working) and then produce a set of results from the same json array based on the specific "station" option chosen from the dropdown list (which is not working currently). I assume I may need a function to be called and loop through the array with the onchange method, however, I am not certain how that would work.
[
{
"ID": 1,
"Station": "Carmichael Rd.",
"Address": "54 Myers Rd.",
"Monthly_CStore_Sales": "120,000",
"Operator": "Michael Sears",
"Top_SKU": "Hotdogs"
},
{
"ID": 2,
"Station": "Baillou Hill",
"Address": "564 Jackson Ave.",
"Monthly_CStore_Sales": "89000",
"Operator": "Sarah Pikes",
"Top_SKU": "Patties"
},
{
"ID": 3,
"Station": "Oakesfield",
"Address": "42 Peterson St.",
"Monthly_CStore_Sales": "150000",
"Operator": "Yolanda Gray",
"Top_SKU": "Chicken"
}
]
Code
<select id="dmenu"></select>
<div id="optionT"></div>
<script>
// Populates the #dmenu dropdown with Station names fetched from smallData.json.
let dropdown = document.getElementById('dmenu');
dropdown.length = 0;
let defaultOption = document.createElement('option');
defaultOption.text = 'Choose Station';
dropdown.add(defaultOption);
dropdown.selectedIndex = 0;
const url = './smallData.json';
const request = new XMLHttpRequest();
request.open('GET', url, true);
request.onload = function() {
if (request.status === 200) {
const data = JSON.parse(request.responseText);
let option;
for (let i = 0; i < data.length; i++) {
option = document.createElement('option');
option.text = data[i].Station;
dropdown.add(option);
// NOTE(review): the inner loop below is the broken part this question asks
// about: `x` is an implicit global, `data.Station[x]` should be
// `data[x].Station`, the `+` chain ends in an assignment expression (a
// syntax error), the <ul>/<li> markup is never closed, and the braces are
// unbalanced (`} else {` below no longer pairs with the `if`). The
// rendering also belongs in an onchange handler (see the answer that
// follows), not inside this populate loop.
var optionText = "";
for (x in data){
optionText += '<ul>' +
'<li>Station: '+ data.Station[x] +'</li>' +
document.getElementById('optionT').innerHTML = optionText;
}
} else {
// Reached the server, but it returned an error
}
}
request.onerror = function() {
console.error('An error occurred fetching the JSON from ' + url);
};
request.send();
</script>
Example:
If I select for example: "Station Carmichael Road", all the key value pairs associated with that Station field I want to come up:
"ID:1",
"Address": "54 Myers Rd.",
"Monthly_CStore_Sales": "120,000",
"Operator": "Michael Sears",
"Top_SKU": "Hotdogs"
You want to show Station's data when a drop-down option is selected.
Add onchange event on your drop-down element.
<select id="dmenu" onchange="handleOnChange(this)"></select>
Then move your list-rendering logic to handleOnChange()
// Renders the selected station's key/value pairs as a list into #optionT.
function handleOnChange(selectedDropdown) {
  // Find array element by "Station" value
  var station = data.find(function(element) {
    return element.Station == selectedDropdown.value;
  });
  // If station exists
  if (station) {
    // Build the whole list first, then write it to the DOM once.
    // (Assigning innerHTML inside the loop rendered a partial list on
    // every iteration and never included the closing </ul>, which is
    // appended only after the loop finishes.)
    let optionText = '<ul>';
    Object.keys(station).forEach(function(key) {
      optionText += '<li>' + key + ': ' + station[key] + '</li>';
    });
    optionText += '</ul>';
    document.getElementById('optionT').innerHTML = optionText;
  }
}
Full Code
<script>
// Renders the selected station's key/value pairs into #optionT.
function handleOnChange(selectedDropdown) {
  // Find array element by "Station" value
  var station = data.find(function(element) {
    return element.Station == selectedDropdown.value;
  });
  // If station exists
  if (station) {
    // Build the whole list first, then write it to the DOM once:
    // assigning innerHTML inside the loop wrote a partial list on every
    // pass and never included the closing </ul>.
    let optionText = '<ul>';
    Object.keys(station).forEach(function(key) {
      optionText += '<li>' + key + ': ' + station[key] + '</li>';
    });
    optionText += '</ul>';
    document.getElementById('optionT').innerHTML = optionText;
  }
}
// Populate the dropdown once the JSON file has been fetched.
let dropdown = document.getElementById('dmenu');
dropdown.length = 0;
let defaultOption = document.createElement('option');
defaultOption.text = 'Choose Station';
dropdown.add(defaultOption);
dropdown.selectedIndex = 0;
const url = './smallData.json';
let data;
const request = new XMLHttpRequest();
request.open('GET', url, true);
request.onload = function() {
  if (request.status === 200) {
    data = JSON.parse(request.responseText);
    let option;
    for (let i = 0; i < data.length; i++) {
      option = document.createElement('option');
      option.text = data[i].Station;
      dropdown.add(option);
    }
  }
  else {
    // Reached the server, but it returned an error
  }
}
request.onerror = function() {
  console.error('An error occurred fetching the JSON from ' + url);
};
request.send();
</script>

Why is angular extend function not combining two json objects

I have JavaScript vars so that I can view what is going on underneath the hood, so to say.
The first two vars are displaying their respective objects just fine, but not the third, which is using angular.extend to combine two json objects retrieved from two different databases. One is a local db and the other is from a production db.
The common id between the two json is what I am trying to "merge"
JSON 1:
[{"ID": 1, "TITLE": "CSR", "PHONE": "555-555-1212", "FNAME": "JOHN", "LNAME": "SMITH"}]
JSON 2:
[{"ID": 1, "GROUP_MEMBER_ID": "1","GROUP_MEMBER_TYPE_ID":"4","GROUP_ID":"1"}]
The result that I would like to see:
[{"ID": 1, "GROUP_MEMBER_ID": "1","GROUP_MEMBER_TYPE_ID":"4","GROUP_ID":"1", "TITLE": "CSR", "PHONE": "555-555-1212", "FNAME": "JOHN", "LNAME": "SMITH"}]
I have been trying to use angular.extend to no avail:
// local json objects to view result sets
var mLocalJson = {};
var mProdJson = {};
var mCombinedJson = {};
// NOTE(review): missing semicolon after angular.module(...) — harmless, but
// inconsistent with the rest of the file.
var teamApp = angular.module('teamApp', [])
teamApp.controller('mainController', function($scope, $http) {
$scope.documentsLocal = [];
$scope.documentsProd = [];
$scope.documentsCombined = [];
$scope.loadDataLocal = function () {
$http.post("php/getTeamsLocal.php")
.then(function(resultLocal) {
$scope.documentsLocal = resultLocal.data;
mLocalJson = resultLocal.data;
});
};
$scope.loadDataProd = function () {
$http.post("php/getTeamsProd.php")
.then(function(resultProd) {
$scope.documentsProd = resultProd.data;
mProdJson = resultProd.data;
});
};
// NOTE(review): nothing guarantees the two $http loads above have resolved
// when this runs — if either array is still empty the "merge" produces
// nothing. Also angular.extend is shallow and, on arrays, copies
// element-by-numeric-index, so the later source replaces whole elements
// instead of merging their fields; element-wise extend is needed here.
$scope.loadDataCombined = function(){
mCombinedJson = angular.extend($scope.documentsCombined, $scope.documentsProd, $scope.documentsLocal);
};
});
I have tried a for loop. I have tried the following:
// Same shape as above, using the module-level copies: still subject to the
// same timing problem (mProdJson/mLocalJson may not be populated yet) and
// the same index-wise array-extend semantics.
$scope.loadDataCombined = function(){
mCombinedJson = angular.extend($scope.documentsCombined, mProdJson, mLocalJson);
};
No matter what I have tried mCombinedJson is NULL
The API is not returning a JavaScript object. It is returning a JavaScript array with contents of one JavaScript object.
Use angular.extend to combine the objects inside the array.
var array1 = [{"ID": 1, "TITLE": "CSR", "PHONE": "555-555-1212", "FNAME": "JOHN", "LNAME": "SMITH"}];
var array2 = [{"ID": 1, "GROUP_MEMBER_ID": "1","GROUP_MEMBER_TYPE_ID":"4","GROUP_ID":"1"}];
// angular.extend copies the own properties of array2[0] onto array1[0]
// (mutating that first record) and returns the merged object, which we
// re-wrap in a one-element array.
var mergedRecord = angular.extend(array1[0], array2[0]);
var combinedArray = [mergedRecord];
console.log(combinedArray);
<script src="//unpkg.com/angular/angular.js"></script>
I used for loops to solve my issue:
$scope.loadDataProd = function () {
$http.post("php/getTeamsProd.php")
.then(function(resultProd) {
$scope.documentsProd = resultProd.data;
mProdJson = resultProd.data;
for (var i = 0; i < mLocalJson.length; i++) {
for (var j = 0; j < mProdJson.length; j++) {
if (mLocalJson[i].TEAM_ID == mProdJson[j].TEAMM_ID) {
// NOTE(review): this pushes a one-element ARRAY per match ([{...}]), not a
// plain object — consumers of mCombinedJson will see nested arrays.
mCombinedJson.push([{
"TEAM_GROUP_ID": $scope.documentsLocal[i].TEAM_GROUP_ID,
"TEAM_GROUP_MEMBER_ID": $scope.documentsLocal[i].TEAM_GROUP_MEMBER_ID,
"TEAM_GROUP_DESC": $scope.documentsLocal[i].TEAM_GROUP_DESC,
"TEAM_GROUP_MEMBER_TYPE_DESC": $scope.documentsLocal[i].TEAM_GROUP_MEMBER_TYPE_DESC,
"TEAM_ID": $scope.documentsProd[j].TEAMM_ID,
"TEAMM_DESC": $scope.documentsProd[j].TEAMM_DESC,
"TEAMM_EMAIL": $scope.documentsProd[j].TEAMM_EMAIL,
"TEAMM_EXTENSION": $scope.documentsProd[j].TEAMM_EXTENSION,
"TEAMM_EZLYNX_USERNAME": $scope.documentsProd[j].TEAMM_EZLYNX_USERNAME,
"TEAMM_FAX": $scope.documentsProd[j].TEAMM_FAX,
"TEAMM_NAME": $scope.documentsProd[j].TEAMM_NAME,
"TEAMM_PHONE": $scope.documentsProd[j].TEAMM_PHONE,
"TEAMM_QUEUE": $scope.documentsProd[j].TEAMM_QUEUE,
"TEAMM_QUEUE_GROUP": $scope.documentsProd[j].TEAMM_QUEUE_GROUP,
"TEAMM_QUEUE_KILLED": $scope.documentsProd[j].TEAMM_QUEUE_KILLED,
"TEAMM_QUEUE_SENT": $scope.documentsProd[j].TEAMM_QUEUE_SENT,
"TEAMM_TYPE": $scope.documentsProd[j].TEAMM_TYPE
}]);
}
}
}
});
};
$scope.loadDataCombined = function(){
// NOTE(review): the 300 ms setTimeout is a race against the two $http loads;
// if either response takes longer, mLocalJson/mProdJson are still empty when
// this fires. Chaining on the load promises ($q.all) would be deterministic.
setTimeout(function(){
for (var i = 0; i < mLocalJson.length; i++) {
for (var j = 0; j < mProdJson.length; j++) {
if (mLocalJson[i].TEAM_ID == mProdJson[j].TEAMM_ID) {
mCombinedJson.push([{
"TEAM_GROUP_ID": mLocalJson[i].TEAM_GROUP_ID, // 1 - 7
"TEAM_GROUP_MEMBER_ID": mLocalJson[i].TEAM_GROUP_MEMBER_ID,
"TEAM_GROUP_DESC": mLocalJson[i].TEAM_GROUP_DESC, // Cereal Killers, Guns n Closes, etc...
"TEAM_GROUP_MEMBER_TYPE_DESC": $scope.documentsLocal[i].TEAM_GROUP_MEMBER_TYPE_DESC, // Director, Manager, Assistant Manager, Producer
"TEAM_ID": mProdJson[j].TEAMM_ID,
"TEAMM_DESC": mProdJson[j].TEAMM_DESC,
"TEAMM_EMAIL": mProdJson[j].TEAMM_EMAIL,
"TEAMM_EXTENSION": mProdJson[j].TEAMM_EXTENSION,
"TEAMM_EZLYNX_USERNAME": mProdJson[j].TEAMM_EZLYNX_USERNAME,
"TEAMM_FAX": mProdJson[j].TEAMM_FAX,
"TEAMM_NAME": mProdJson[j].TEAMM_NAME,
"TEAMM_PHONE": mProdJson[j].TEAMM_PHONE,
"TEAMM_QUEUE": mProdJson[j].TEAMM_QUEUE,
"TEAMM_QUEUE_GROUP": mProdJson[j].TEAMM_QUEUE_GROUP,
"TEAMM_QUEUE_KILLED": mProdJson[j].TEAMM_QUEUE_KILLED,
"TEAMM_QUEUE_SENT": mProdJson[j].TEAMM_QUEUE_SENT,
"TEAMM_TYPE": mProdJson[j].TEAMM_TYPE
}]);
}
}
}
}, 300);
// NOTE(review): lDc() runs immediately — BEFORE the setTimeout above fires —
// so documentsCombined is assigned the (possibly still empty) array early;
// it only appears to work because that same array is mutated in place later.
$scope.lDc();
};
$scope.lDc = function(){
$scope.documentsCombined = mCombinedJson;
};

creating a service which holds values to be updated later with separate controllers

I am trying to create a service which holds values that I want to be able to update from other controllers. It's a fake financial tracker which allows me to update the values in this service. I can't get it to work and I know I may be setting it up incorrectly. Can someone help me out with this?
My code:
(function () {
    "use strict";

    // FIX: the original read `angular.module('Bkbank' [])` — the missing
    // comma between the module name and the dependency array is a syntax
    // error that prevents the whole module from registering.
    var Bkbank = angular.module('Bkbank', []);

    // Dashboard controller: copies the service's current values onto the
    // scope. NOTE(review): these are copies of primitives — later writes to
    // the service from other controllers won't show up here unless the view
    // binds to the service object itself (or the copies are refreshed).
    Bkbank.controller('dashboardCtlr', function ($scope, dashboardSrvs) {
        /*User Data*/
        $scope.userName = dashboardSrvs.userName;
        $scope.acctNum = dashboardSrvs.acctNum;
        $scope.startDate = dashboardSrvs.startDate;
        $scope.checkingsTotal = dashboardSrvs.checkingsTotal;
        $scope.savingsTotal = dashboardSrvs.savingsTotal;
        $scope.investTotal = dashboardSrvs.investTotal;
        $scope.ouncesUpdate = dashboardSrvs.ouncesUpdate;
        $scope.debtBalance = dashboardSrvs.debtBalance;
        $scope.goldSpot = dashboardSrvs.goldSpot;
        /*Section Titles*/
        $scope.userTitle = dashboardSrvs.userTitle;
        $scope.servicesTitle = dashboardSrvs.servicesTitle;
        $scope.checkingsTitle = dashboardSrvs.checkingsTitle;
        $scope.savingsTitle = dashboardSrvs.savingsTitle;
        $scope.investTitle = dashboardSrvs.investTitle;
        $scope.debtTitle = dashboardSrvs.debtTitle;
        $scope.savingsUpdateTitle = dashboardSrvs.savingsUpdateTitle;
    });

    // Singleton service holding the shared dashboard state.
    Bkbank.service('dashboardSrvs', function () {
        /*User Data*/
        this.userName = "Tim Willson";
        this.acctNum = 554887;
        this.startDate = "01/12/75";
        this.checkingsTotal = "56458.00";
        this.savingsTotal = "98187.00";
        this.investTotal = "34143.00";
        this.ouncesUpdate = "30";
        this.debtBalance = "10000.00";
        this.goldSpot = "1138.10";
        /*Section Titles*/
        this.userTitle = "User";
        this.servicesTitle = "Financial Services";
        this.checkingsTitle = "Checkings";
        this.savingsTitle = "Savings";
        this.investTitle = "Investments";
        this.debtTitle = "debt";
        this.savingsUpdateTitle = "Update Savings Account";
    });
}());
I am not fully clear on the question you asked, but what I understand is that you want to get/set attribute values in a service so that updates are available to all consumer controller(s). In such a scenario you can create the service like this, e.g.
app.service('dashboardSrvs', function() {
  // Backing field kept private in this closure; every consumer controller
  // reads/writes the same value through the accessor below.
  var userName = "Tim Willson"; // default value
  var api = {};
  // Equivalent to a literal get/set pair (enumerable + configurable match
  // object-literal accessor semantics).
  Object.defineProperty(api, 'userName', {
    enumerable: true,
    configurable: true,
    get: function() {
      return userName;
    },
    set: function(val) {
      userName = val;
    }
  });
  // Returning an object from the constructor makes it the service instance.
  return api;
});
And inside the controller you can update the userName as -
// NOTE(review): the service above is registered as `dashboardSrvs`;
// `testService` presumably refers to the same service injected under a
// different name — verify against the consuming controller.
testService.userName = 'Mike Tyson';
Angular merge to the rescue!
I advise you in advance to read this great article about Angular copy / extend / merge objects. Article link here
var demoApp = angular.module("demoApp", []);
// Service exposing a nested config object; returning the object makes it
// the shared service instance.
demoApp.service("dashboardService", function () {
var configObj = {
"userData": {
"userName": "A",
"acctNum": "B",
"startDate": "C"
}
};
return configObj;
});
demoApp.controller("appController", function ($scope, dashboardService) {
// Override the new values of the service with a new object
var newConfigValues = {
"userData": {
"userName": "X",
"acctNum": "Z"
}
};
// angular.merge is a DEEP extend into the fresh {} target, so keys absent
// from newConfigValues (startDate) survive from the service object.
var newConfigObj = angular.merge({}, dashboardService, newConfigValues);
console.log(newConfigObj); // "userName": "X", "acctNum": "Z", "startDate": "C"
});
As you can see, you can override all or just some values. If you do the latter, the original values in your service will be kept.
JSFiddle here

Firebase realtime data is not getting added properly in scope variable angularjs

This is firebase structure for categories2.
and this is for subcategories2.
To display data on screen I want $scope.Categories [] to be filled in this format.
[{
"id": "1",
"display_with": "7",
"image": "/images/salt_sugar.png",
"name": "Salt & Sugar",
"subcategories": [{
"scid": "1",
"scname": "Sugar Products"
},
{
"scid": "5",
"scname": "Tea"
}
]
},
.
.
.
.
]
Logic for filling $scope.Categories [].
// Builds $scope.Categories from 'categories2', attaching each category's
// subcategory records fetched one-by-one from 'subcategories2'.
$scope.Categories = [];
var categoriesRef = firebase.database().ref('categories2');
categoriesRef.on('value', function(snapshot) {
$scope.Categories = [];
var recvCategories = [];
recvCategories = snapshot.val();
for (var j=0; j<recvCategories.length; ++j){
var category = recvCategories[j];
//alert (category);
if (category != undefined){
var category_modified = {};
category_modified.id = category.id;
category_modified.display_with = category.display_with;
category_modified.name = category.name;
category_modified.image = category.image;
// NOTE(review): `var subcategories` is function-scoped, so every snapshot2
// callback below closes over the SAME binding; once the outer loop moves on,
// late-arriving snapshots all push into whatever array `subcategories`
// points at last. Capture it per-iteration (IIFE/let) to fix.
var subcategories = [];
for(var key in category.subcategories) {
var subcategoriesRef = firebase.database().ref('subcategories2/' + key);
subcategoriesRef.on('value', function(snapshot2) {
subcategories.push(snapshot2.val());
});
}
category_modified.subcategories = subcategories;
$scope.Categories.push(category_modified);
}
}
// NOTE(review): this runs before any subcategory 'value' callback has fired,
// so the first render shows empty subcategories; they only appear after
// something else triggers another digest — which matches the symptom asked
// about above.
$scope.$apply();
});
As soon as data is available I want to display it on screen, so I am using $scope.$apply();
The problem is that the data is not displayed properly, but once I go to another controller and come back to the same controller, everything displays as expected.
Why is the subcategories information not adding up properly in $scope.Categories[]?
I just modified your fiddle. just check the following link https://jsfiddle.net/py3ofkyc/8/
// Loads ALL subcategories once, then builds the Categories list with each
// category's subcategory records resolved from that in-memory snapshot —
// avoiding per-key async fetches. Fixes over the posted version: the stray
// backtick after the final brace (a syntax error in the post) is removed,
// and the unused outer `var Categories` that was immediately shadowed by
// the inner declaration is dropped.
function myFunction() {
  var subcategories = [];
  var subcategoriesRef = firebase.database().ref('subcategories2');
  subcategoriesRef.on('value', function(snapshot2) {
    subcategories = snapshot2.val();
    var categoriesRef = firebase.database().ref('categories2');
    categoriesRef.on('value', function(snapshot) {
      var Categories = [];
      var recvCategories = snapshot.val();
      _(recvCategories).forEach(function(value) {
        var category = value;
        if (category != undefined) {
          var category_modified = {};
          category_modified.id = category.id;
          category_modified.display_with = category.display_with;
          category_modified.name = category.name;
          category_modified.image = category.image;
          var _subcategories = [];
          for (var key in category.subcategories) {
            // Match the already-loaded subcategory record by its scid.
            var data = _.filter(subcategories, { 'scid': key });
            _subcategories.push(data[0]);
          }
          category_modified.subcategories = _subcategories;
          Categories.push(category_modified);
        }
      });
      console.log(Categories);
    });
  });
}

How to write more than 25 items/rows into Table for DynamoDB?

I am quite new to Amazon DynamoDB. I currently have 20000 rows that I need to add to a table. However, based on what I've read, it seems that I can only write up to 25 rows at a time using BatchWriteItem class with 25 WriteRequests. Is it possible to increase this? How can I write more than 25 rows at a time? It is currently taking about 15 minutes to write all 20000 rows. Thank you.
You can only send up to 25 items in a single BatchWriteItem request, but you can send as many BatchWriteItem requests as you want at one time. Assuming you've provisioned enough write throughput, you should be able to speed things up significantly by splitting those 20k rows between multiple threads/processes/hosts and pushing them to the database in parallel.
It's maybe a bit heavyweight for that small of a dataset, but you can use AWS Data Pipeline to ingest data from S3. It basically automates the process of creating a Hadoop cluster to suck down your data from S3 and send it to DynamoDB in a bunch of parallel BatchWriteItem requests.
I was looking for some code to do this with the JavaScript SDK. I couldn't find it, so I put it together myself. I hope this helps someone else!
// Writes an arbitrary number of items to `table` by splitting them into
// BatchWriteItem requests of at most 25 puts (the DynamoDB per-request
// limit) and firing them in parallel. Calls cb(errorsFlag) exactly once,
// after every request has completed (errorsFlag is boolean).
// Fixes: cb is now invoked even when `data` is empty (the original never
// called back); the error flag is a clean boolean; params are built as a
// plain object instead of building and re-parsing a JSON string.
function multiWrite(table, data, cb) {
  var AWS = require('aws-sdk');
  var db = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});
  // Build the batches (for...in kept so both arrays and plain objects work).
  var batches = [];
  var current_batch = [];
  for (var x in data) {
    current_batch.push({
      PutRequest: {
        Item: data[x]
      }
    });
    // 25 puts per BatchWriteItem request — start a new batch.
    if (current_batch.length === 25) {
      batches.push(current_batch);
      current_batch = [];
    }
  }
  // Add the final partial batch, if any.
  if (current_batch.length > 0) batches.push(current_batch);
  // Nothing to write: still report back instead of never calling cb.
  if (batches.length === 0) return cb(false);
  // Shared completion state for all in-flight requests.
  var completed_requests = 0;
  var errors = false;
  function handler(request) {
    return function(err, data) {
      completed_requests++;
      if (err) {
        errors = true;
        console.error(JSON.stringify(err, null, 2));
        console.error("Request that caused database error:");
        console.error(JSON.stringify(request, null, 2));
      }
      // Fire the callback once the last request has settled.
      if (completed_requests === batches.length) {
        cb(errors);
      }
    };
  }
  // Launch all batches in parallel.
  for (var i = 0; i < batches.length; i++) {
    var params = { RequestItems: {} };
    params.RequestItems[table] = batches[i];
    db.batchWrite(params, handler(params));
  }
}
// Writes `data` to the history table in chunks of 25 (the BatchWriteItem
// per-request limit), using lodash to chunk and async.every to run the
// writes; reports cb(null, {allWritten: bool}) or cb(err).
function putInHistory(data,cb) {
var arrayOfArray25 = _.chunk(data, 25);
// async.every: `result` is true only if every chunk's iterator reported true.
async.every(arrayOfArray25, function(arrayOf25, callback) {
var params = {
RequestItems: {
[TABLES.historyTable]: []
}
};
arrayOf25.forEach(function(item){
params.RequestItems[TABLES.historyTable].push({
PutRequest: {
Item: item
}
})
});
docClient.batchWrite(params, function(err, data) {
if (err){
console.log(err);
// Passing err aborts the whole async.every with that error.
callback(err);
} else {
console.log(data);
callback(null, true);
};
});
}, function(err, result) {
if(err){
cb(err);
} else {
if(result){
cb(null,{allWritten:true});
} else {
cb(null,{allWritten:false});
}
}
});
}
You can use lodash to make chunks of data from the array and then use async library's each/every method to do a batchWrite on chunks of 25 elements
Using aws cli and aws-vault, this is what I do.
Let's imagine you have the following file (data.json) with 1000 rows
{ "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}},
... to 1000
and you need to split it into chunk files with 25 rows in each!
I use the following c# code in LinqPad to generate the .sh file and json chunks to be able to insert them into dynamodb using aws cli
// LinqPad script: splits data.json into chunk files of 25 rows each (the
// DynamoDB BatchWriteItem limit) and emits a .sh file of aws-vault/aws-cli
// batch-write-item commands, one per chunk.
// FIX: the post's `#"` / `$#"` prefixes are garbled verbatim-string
// prefixes — C# uses `@"` and `$@"`; with `#` the code does not compile.
void Main()
{
    var sourcePath = @"D:\data\whereYourMainJsonFileIsLocated\";
    var sourceFilePath = @"data.json";
    var awsVaultProfileName = "dev";
    var env = "dev";
    var tableName = "dynamodb-table-name";
    var lines = System.IO.File.ReadAllLines(sourcePath + sourceFilePath);
    var destinationPath = Path.Combine(sourcePath, env);
    var destinationChunkPath = Path.Combine(sourcePath, env, "chunks");
    if (!System.IO.Directory.Exists(destinationChunkPath))
        System.IO.Directory.CreateDirectory(destinationChunkPath);
    System.Text.StringBuilder shString = new System.Text.StringBuilder();
    // 25 rows per chunk — the BatchWriteItem per-request maximum.
    for (int i = 0; i < lines.Count(); i = i + 25)
    {
        var pagedLines = lines.Skip(i).Take(25).ToList().Distinct().ToList();
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        sb.AppendLine("{");
        sb.AppendLine($" \"{tableName}\": [");
        foreach (var element in pagedLines)
        {
            // The last row must not keep its trailing comma or the chunk
            // would be invalid JSON.
            if (element == pagedLines.Last())
                sb.AppendLine(element.Substring(0, element.Length - 1));
            else
                sb.AppendLine(element);
        }
        sb.AppendLine("]");
        sb.AppendLine("}");
        var fileName = $"chunk{i / 25}.json";
        System.IO.File.WriteAllText(Path.Combine(destinationChunkPath, fileName), sb.ToString(), Encoding.Default);
        shString.AppendLine($@"aws-vault.exe exec {awsVaultProfileName} -- aws dynamodb batch-write-item --request-items file://chunks/{fileName}");
    }
    System.IO.File.WriteAllText(Path.Combine(destinationPath, $"{tableName}-{env}.sh"), shString.ToString(), Encoding.Default);
}
the result would be chunk files as chunk0.json, chunk1.json, etc
{
"dynamodb-table-name": [
{ "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}}
]
}
and .sh file
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk0.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk1.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk2.json
and finally just run the .sh file and you have all data in your table!
From the answer from #Geerek here is the solution with a lambda function:
// Lambda entry point: batches event.data into 25-item BatchWriteItem
// requests and writes them in parallel.
exports.handler = (event, context, callback) => {
console.log(`EVENT: ${JSON.stringify(event)}`);
var AWS = require('aws-sdk');
AWS.config.update({ region: process.env.REGION })
var docClient = new AWS.DynamoDB.DocumentClient();
// NOTE(review): a real Lambda invocation event is JSON, so `cb` cannot
// arrive here as a function — confirm how this handler is actually invoked.
const {data, table, cb} = event
// Build the batches
var batches = [];
var current_batch = [];
var item_count = 0;
for (var i = 0; i < data.length; i++) {
// Add the item to the current batch
item_count++
current_batch.push({
PutRequest: {
Item: data[i],
},
})
// If we've added 25 items, add the current batch to the batches array
// and reset it
if (item_count % 25 === 0) {
batches.push(current_batch)
current_batch = []
}
}
// Add the last batch if it has records and is not equal to 25
if (current_batch.length > 0 && current_batch.length !== 25) {
batches.push(current_batch)
}
// Handler for the database operations
var completed_requests = 0
var errors = false
function handler (request) {
console.log('in the handler: ', request)
return function (err, data) {
// Increment the completed requests
completed_requests++;
// Set the errors flag
errors = (errors) ? true : err;
// Log the error if we got one
if(err) {
console.error(JSON.stringify(err, null, 2));
console.error("Request that caused database error:");
console.error(JSON.stringify(request, null, 2));
// NOTE(review): the Lambda completion callback fires here once PER batch
// response; with more than one batch only the first invocation takes
// effect — completion should be signalled once, after all batches settle.
callback(err);
}else {
callback(null, data);
}
// Make the callback if we've completed all the requests
if(completed_requests === batches.length) {
cb(errors);
}
}
}
// Make the requests
var params;
for (var j = 0; j < batches.length; j++) {
// Items go in params.RequestItems.id array
// Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
params = '{"RequestItems": {"' + table + '": []}}'
params = JSON.parse(params)
params.RequestItems[table] = batches[j]
console.log('before db.batchWrite: ', params)
// Perform the batchWrite operation
docClient.batchWrite(params, handler(params))
}
};
I wrote an npm package that should work as a simple drop-in replacement for the batchWrite method, you just need to pass the dynamoDB instance as the first parameter and things should work:
https://www.npmjs.com/package/batch-write-all
Check the example in the project readme file:
// Use below instead of this: dynamodb.batchWrite(params).promise();
batchWriteAll(dynamodb, params).promise();
const { dynamoClient } = require("./resources/db");
const { v4: uuid } = require("uuid");
// Generates 2000 sample person records, splits them into chunks, and writes
// each chunk to the "persons" table with batchWrite in parallel.
// FIX: the original left Promise.all(...).then(...) floating inside the
// async function — failures became unhandled rejections. The writes are now
// awaited inside try/catch so callers can observe success or failure.
const batchWriteLooper = async () => {
  let array = [];
  for (let i = 0; i < 2000; i++) {
    array.push({
      PutRequest: {
        Item: {
          personId: uuid(),
          name: `Person ${i}`,
          age: Math.floor(Math.random() * 100),
          gender: "Male",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      },
    });
  }
  var perChunk = 20; // items per chunk (BatchWriteItem allows at most 25)
  var result = array.reduce((resultArray, item, index) => {
    const chunkIndex = Math.floor(index / perChunk);
    if (!resultArray[chunkIndex]) {
      resultArray[chunkIndex] = []; // start a new chunk
    }
    resultArray[chunkIndex].push(item);
    return resultArray;
  }, []);
  try {
    await Promise.all(
      result.map((chunk) => {
        const params = {
          RequestItems: {
            "persons": chunk,
          },
        };
        return dynamoClient.batchWrite(params).promise();
      })
    );
    console.log("done");
  } catch (err) {
    console.error(err);
    throw err;
  }
};
// Top-level kickoff: attach a rejection handler so a failure is reported.
batchWriteLooper().catch(() => {});

Resources