NodeJS MSSQL multiple queries promised - sql-server

I am having a lot of problems using the mssql npm library.
Here is my db class:
"use strict"
const mssql = require('mssql');
const moment = require("moment");

let pool_historian = null;
let connexion_historian = null;

function mapBarChart(array) {
  return new Promise((resolve, reject) => {
    let result = {}
    result.old = []
    result.this = []
    let currentYear = null;
    for (let i = 0; i < array.length; i++) {
      if (parseInt(moment().format("YYYY")) !== array[i].Annees) {
        result.old.push(array[i]);
      } else {
        result.this.push(array[i]);
      }
    }
    resolve(result);
  })
};

class Historian {
  constructor(data) {
    pool_historian = new mssql.ConnectionPool({
      server: data.host,
      user: data.username,
      password: data.password,
      database: data.historian_db,
      pool: {
        max: 50,
        min: 1
      }
    });
  }

  getBarChart(sensor, from, to) {
    return pool_historian.connect().then(connector => {
      return connector.query`SELECT Annees=YEAR(DateTime),Mois=MONTH(DateTime), Valeur=ROUND(sum(AnalogHistory.Value),2) FROM AnalogHistory WHERE AnalogHistory.TagName IN (${sensor}) AND Quality = 0 AND wwVersion = 'Latest' AND wwRetrievalMode = 'Full' AND DateTime >= ${from} AND DateTime <= ${to} AND AnalogHistory.Value > 0 GROUP BY YEAR(AnalogHistory.DateTime),MONTH(AnalogHistory.DateTime) ORDER BY Annees, Mois`.then(result => {
        connector.close();
        return mapBarChart(result.recordset).then(result => { return result });
        //return result.recordset;
      }).catch(err => {
        return err;
      })
    })
  }

  getLineChart() {
    return pool_historian.connect().then(connector => {
      let variable = "A_000000000000000000000000000045.PV";
      return connector.query`SELECT Annees=YEAR(DateTime),Mois=MONTH(DateTime),day=DAY(DateTime), Valeur=ROUND(sum(AnalogHistory.Value),2) FROM AnalogHistory WHERE AnalogHistory.TagName IN (${variable}) AND Quality = 0 AND wwVersion = 'Latest' AND wwRetrievalMode = 'Cyclic' AND DateTime >= '20160101 00:00:00.000' AND DateTime <= '20170809 00:00:00.000' AND AnalogHistory.Value > 0 GROUP BY YEAR(AnalogHistory.DateTime),MONTH(AnalogHistory.DateTime), Day(AnalogHistory.DateTime) ORDER BY Annees, Mois`.then(result => {
        connector.close();
        return result.recordset;
      }).catch(err => {
        return err;
      })
    })
  }

  close() {
    pool_historian.close()
  }
}
This class is used in this "business class":

const Historian = require(`${__dirname}/historian-query`)
const Fdedb = require(`${__dirname}/fdedb-query`)
const moment = require('moment');
moment.locale("fr-FR");

class Graph_Tasks {
  constructor() {
    this.historian = new Historian({ host: "192.168.1.16", username: "******", password: "w***", historian_db: "R******e" })
    this.fdedb = new Fdedb({ host: "192.168.1.16", username: "*****", password: "*****", fde_db: "S*****" })
  }

  createGraphForBuilding(code) {
    return new Promise((resolve, reject) => {
      this.fdedb.getList(code).then(list => {
        console.log(list)
        let datas = [];
        // For each item on the list perform these 2 queries
        Promise.all([this.historian.getLineChart("A_000000000000000000000000000045.PV", moment().subtract(1, "years").startOf("year").format(), moment().format()), this.historian.getBarChart("A_000000000000000000000000000045.PV", moment().subtract(1, "years").startOf("year").format(), moment().format())]).then(results => {
          let datas = []
          datas = { "lineChart": null, "barChart": results[0] };
          console.log(datas)
          res.render('index', { title: 'WebGraph', message: 'Yo Yo', datas });
        })
        console.log(datas)
        resolve(datas)
      }).catch(console.log);
    });
  }
}
module.exports = Graph_Tasks;
As you can see, what I'm trying to do is perform simultaneous database requests. As I read in the documentation, the connection pool should let me do this properly. So when the program reaches Promise.all, I expected the two requests to be launched simultaneously.
But I get an error:
An exception occurred: Error
Promise Rejection (ConnectionError: Already connecting to database! Call close before connecting to different database.)
ConnectionError: Already connecting to database! Call close before connecting to different database.
at ConnectionError (d:\repositories\fde\node_modules\mssql\lib\base.js:1428:7)
at ConnectionPool._connect (d:\repositories\fde\node_modules\mssql\lib\base.js:235:23)
at EventEmitter.connect.PromiseLibrary (d:\repositories\fde\node_modules\mssql\lib\base.js:217:19)
at ConnectionPool.connect (d:\repositories\fde\node_modules\mssql\lib\base.js:216:12)
at Historian.getBarChart (d:\repositories\fde\class\historian-query.js:39:31)
at __dirname.createGraphForBuilding.Promise.fdedb.getList.then.list (d:\repositories\fde\class\graph_tasks.js:21:188)
at process._tickCallback (internal/process/next_tick.js:109:7)
So my question is: how can I adapt the code so that I can run several queries at the same time (the Promise.all for each of my list items)?

The issue is that you can't open multiple connection pools to the same server (I assume Fdedb is opening another connection, since you didn't include its code). If, say, you were pulling that data from two different servers, then opening two connection pools would be appropriate; I've run into that use case before. But it looks like your two databases are on the same server (192.168.1.16 in your code), so it's better to open just one connection pool and pass it to your objects to run the queries. You can touch multiple databases on the same host using plain old SQL, see: How do I query tables located in different database?
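Here is a minimal sketch of that single-pool idea (this is not the asker's exact code; the credentials, database names and the trimmed query below are placeholders):

const sql = require('mssql');

// One pool for the whole application; every class that needs the DB receives it.
const pool = new sql.ConnectionPool({
  server: '192.168.1.16',   // placeholder, same host for both databases
  user: 'user',
  password: 'password',
  database: 'HistorianDb',  // default database for this pool
  pool: { max: 50, min: 1 }
});
const poolReady = pool.connect(); // connect once, reuse for every query

async function getBarChart(sensor, from, to) {
  await poolReady;
  const request = pool.request();
  request.input('sensor', sensor);
  request.input('from', from);
  request.input('to', to);
  // A second database on the same server is reachable with three-part names,
  // e.g. FROM FdeDb.dbo.SomeTable, so one pool covers both.
  return request.query(
    "SELECT YEAR(DateTime) AS Annees, MONTH(DateTime) AS Mois, " +
    "ROUND(SUM(Value), 2) AS Valeur " +
    "FROM AnalogHistory " +
    "WHERE TagName = @sensor AND DateTime >= @from AND DateTime <= @to " +
    "GROUP BY YEAR(DateTime), MONTH(DateTime)"
  );
}

// Several calls can now run in parallel against the same pool:
// Promise.all([getBarChart(...), getBarChart(...)]).then(...)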

Related

Attach files to record in Netsuite

I am transferring attachments from Zoho to NetSuite, but I am facing problems attaching them to an opportunity or any other object. I have already uploaded the file to the File Cabinet in NetSuite and tried to bind it to the record's notes, but that doesn't work. It only adds the note to the record, with no sign of any file in the file option.
Thank you.
You would use the record.attach function. You would need the internal id of the file and of the transaction. In SS1 (using nlapiAttachRecord) it was important to list the file arguments first. The SS2 syntax makes that clearer:
record.attach({
  record: {
    type: 'file',
    id: fileid
  },
  to: {
    type: 'transaction',
    id: transactionid
  }
});
/**
 * @NApiVersion 2.1
 * @NScriptType MapReduceScript
 * @NModuleScope SameAccount
 */
/**
 * In this I am using a Map/Reduce script to process & attach multiple files from
 * the FileCabinet of NetSuite, so that it never goes out of governance.
 */
define(['N/record', 'N/query'],
  (record, query) => {

    const getInputData = (getInputDataContext) => {
      try {
        /**
         * Query for getting transaction ID & other header detail of record.
         */
        let transQuery = "SELECT custrecord_rf_tid as tid, custrecord_rf_fid as fid, id FROM customrecord_rflink where custrecord_rf_comp <> 'T' and custrecord_rf_type = 11";
        let transQueryResult = runSuiteQuery(transQuery);
        if (transQueryResult.length > 0) {
          log.debug("Count of record left to process--->", transQueryResult.length);
          return transQueryResult;
        } else { // In case no transaction was left to transform.
          log.debug({ title: "No Remaining Transaction!" });
          return 1;
        }
      } catch (e) {
        log.error({ title: "Error inside getInputData.", details: [e.message, e.stack] });
      }
    }

    const map = (mapContext) => {
      try {
        let mapData = JSON.parse(mapContext.value);
        log.debug({ title: "mapData after parse", details: mapData });
        let staginRecId = Number(mapData.id);
        let fileId = Number(mapData.fid);
        let billId = Number(mapData.tid);
        let outputVal = attachfile('file', fileId, 'inventoryadjustment', billId);
        let staginRec;
        if (outputVal === true) {
          staginRec = record.submitFields({
            type: 'customrecord_rflink',
            id: staginRecId,
            values: {
              'custrecord_rf_comp': true
            }
          });
          log.debug("record saved with id-->", staginRecId);
        } else {
          log.debug("record saving failed with id-->", staginRecId);
        }
      } catch (e) {
        log.error({ title: "Error in Map", details: [e.message, e.stack] });
      }
    }

    const reduce = (reduceContext) => {
    }

    const summarize = (summarizeContext) => {
      log.debug('Summarize completed');
    }

    function runSuiteQuery(queryString) {
      log.debug("Query", queryString);
      let resultSet = query.runSuiteQL({
        query: queryString
      });
      log.debug("Query wise Data", resultSet.asMappedResults());
      if (resultSet && resultSet.results && resultSet.results.length > 0) {
        return resultSet.asMappedResults();
      } else {
        return [];
      }
    }

    function attachfile(recType, recId, recTypeTo, recIdTo) {
      record.attach({
        record: {
          type: recType,
          id: recId
        },
        to: {
          type: recTypeTo,
          id: recIdTo
        }
      });
      return true;
    }

    return { getInputData, map, reduce, summarize };
  });

Is there a way to insert object into DB using node.js mssql module

I found a few examples to insert objects into DB directly by doing something like:
var mysql = require('mysql');
var connection = mysql.createConnection({
  host     : 'cccc.net',
  user     : 'username',
  password : 'password',
});
var post = { srcUserID: userSrcID, destUserID: msg.userid, messageContent: msg.txt, messageSendDate: sendDate };
connection.query('INSERT INTO messages SET ?', post, function (err, result) {
});
But this works with the mysql module, and I have SQL Server as my DB, so I am using
var SQL_DB = require('mssql');
Is there a way to insert objects directly into the DB the same way it's possible with the mysql module?
Looking at the node-mssql v4 docs, they use this as an example with callbacks:
const sql = require('mssql');
const config = {
  user: '...',
  password: '...',
  server: 'localhost',
  database: '...',
  pool: {
    max: 10,
    min: 0,
    idleTimeoutMillis: 30000
  }
};

const pool = new sql.ConnectionPool(config);
const transaction = new sql.Transaction(pool);
transaction.begin(err => {
  // ... error checks
  const request = new sql.Request(transaction)
  request.query('insert into mytable (mycolumn) values (12345)', (err, result) => {
    // ... error checks
    transaction.commit(err => {
      // ... error checks
      console.log("Transaction committed.")
    })
  })
})
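For reference, the same insert can be written with async/await instead of callbacks. This is only a sketch, assuming a node-mssql release (v4 or later) where begin/commit/rollback return promises when no callback is passed:

const sql = require('mssql');

async function insertRow(config) {
  // connect() resolves with the connected pool itself
  const pool = await new sql.ConnectionPool(config).connect();
  const transaction = new sql.Transaction(pool);
  await transaction.begin();
  try {
    const request = new sql.Request(transaction);
    await request.query('insert into mytable (mycolumn) values (12345)');
    await transaction.commit();
    console.log('Transaction committed.');
  } catch (err) {
    await transaction.rollback(); // undo the partial work before rethrowing
    throw err;
  } finally {
    await pool.close();
  }
}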
EDIT: node-mssql uses request.input for escaping values, but it does not accept a JS object, so you could quickly roll your own:
sql.connect(config, err => {
  var post = {
    srcUserID: userSrcID,
    destUserID: msg.userid,
    messageContent: msg.txt,
    messageSendDate: sendDate
  };
  const request = new sql.Request();
  let cols = [];
  let inputs = [];
  for (let k in post) {
    request.input(k, post[k]);
    cols.push(k);
    inputs.push('@' + k);
  }
  let query = `insert into messages (${cols.toString()}) values (${inputs.toString()})`;
  request.query(query, (err, result) => {
    // stuff here
  });
});

Node.js SQL server crashes when receiving multiple requests

I have a NodeJS application which is my server, and I created a Database class to help me handle querying my SQL DB. If I send requests a second apart, everything runs fine, no problems. But if I start spamming requests to my server, it crashes due to Error: Cannot enqueue Quit after invoking quit.
Here's my query function inside my Database class
static query(query: string): Promise<any> {
  console.log('Query: ' + query);
  return new Promise((resolve, reject) => {
    this.connect().then(success => {
      sqlConn.query(query, (err, results) => {
        if (err) {
          return reject(err);
        } else {
          return resolve(results);
        }
      });
    }).catch(err => {
      return reject(err);
    }).then(() => {
      if (sqlConn.state !== 'disconnected') {
        sqlConn.end();
      }
    });
  });
};
and here's the this.connect() function
static connect(): Promise<any> {
  return new Promise((resolve, reject) => {
    sqlConn = mysql.createConnection(this.connectionData);
    sqlConn.connect(err => {
      if (err) {
        return reject(err);
      } else {
        return resolve('SQL connection established');
      }
    });
  });
};
I'm pretty sure the problem is that sometimes it is still processing one query when another query comes in before the first one finishes, so it calls sqlConn.end() twice, even when it's already disconnected. Any help is greatly appreciated.
The main goal is for each query to wait until the previous one is 100% done before the next one runs.
You can simplify your code by using the npm module mysql and its built-in connection pool.
From the documentation:
var mysql = require('mysql');
var pool = mysql.createPool({
  connectionLimit : 10,
  host            : 'example.org',
  user            : 'bob',
  password        : 'secret',
  database        : 'my_db'
});

pool.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
  if (error) throw error;
  console.log('The solution is: ', results[0].solution);
});
You can, of course, create your own function that promisifies that call like this:
function query(sql) {
  return new Promise((resolve, reject) => {
    pool.query(sql, (error, results, fields) =>
      error ? reject(error) : resolve({ results, fields })
    );
  });
}
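With that helper in place, queries can be chained so each one waits for the previous one to finish, which is the behaviour the question asks for (the SELECT statements below are placeholders):

async function runInOrder() {
  // The second query does not start until the first has resolved.
  const first = await query('SELECT 1 + 1 AS solution');
  console.log(first.results[0].solution);

  const second = await query('SELECT 2 + 2 AS solution');
  console.log(second.results[0].solution);
}

runInOrder().catch(err => console.error('Query failed:', err));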
If you really want to use this approach, then please use the eachSeries function of the async library.
var chunkedArray = [];
async.eachSeries(chunkedArray, startUpload, endUpload);

function startUpload(data, cb) {
  // iterate over every single item in the array, one at a time
}

function endUpload(err) {
  // finally call this
}
This might help:
https://caolan.github.io/async/docs.html#eachSeries
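A more concrete sketch of that eachSeries idea applied to queries (the connection settings and SQL below are placeholders, not the asker's code):

const async = require('async');
const mysql = require('mysql');

const connection = mysql.createConnection({
  host: '127.0.0.1',
  user: 'root',
  password: 'root',
  database: 'db_name'
});

const queries = [
  'SELECT COUNT(*) AS c FROM table_a',
  'SELECT COUNT(*) AS c FROM table_b'
];

// Each query starts only after the previous one has finished.
async.eachSeries(queries, (sql, done) => {
  connection.query(sql, (err, results) => {
    if (err) return done(err);
    console.log(results);
    done();
  });
}, err => {
  connection.end();
  if (err) console.error('A query failed:', err);
  else console.log('All queries finished in order');
});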
But I would rather suggest you use connection pooling, which puts less overhead on your DB and lets you use MySQL more efficiently than creating multiple connections.
// Load module
var mysql = require('mysql');
// Initialize pool
var pool = mysql.createPool({
  connectionLimit : 10,
  host            : '127.0.0.1',
  user            : 'root',
  password        : 'root',
  database        : 'db_name',
  debug           : false
});
module.exports = pool;
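With the pool exported like this (say, from a db.js module; the path and query below are illustrative), other files can simply require it and run queries, and the pool hands out and reclaims connections for you:

// elsewhere in the application
var pool = require('./db'); // hypothetical path to the module above

pool.query('SELECT 1 + 1 AS solution', function (error, results) {
  if (error) throw error;
  console.log('The solution is:', results[0].solution);
  // No sqlConn.end() here; the pool manages its own connections.
});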

Node Express Multiple SQL server Connection

I need to connect to different databases on different servers.
The servers are Microsoft SQL Server.
I do it like this:
dbconfig.js
var sql1 = require('mssql')
var sql2 = require('mssql')

var conn1 = { server: "SERVER IP", database: "db1", user: "foo", password: "foo", port: 1433 }
var conn2 = { server: "SERVER2 IP", database: "db2", user: "foo2", password: "foo2", port: 1433 }

var server1 = sql1.connect(conn1)
  .then(function () { debug('Connected'); })
  .catch(function (err) { debug('Error connect SQL Server', err); });

var server2 = sql2.connect(conn2)
  .then(function () { debug('Connected'); })
  .catch(function (err) { debug('Error connect SQL Server', err); });

module.exports = { "ServerConn1": sql1, "ServerConn2": sql2 };
After that, both connections are active, but when I run a query on the first connection, it doesn't work.
The error is Invalid object name 'FooDatabase.dbo.fooTable'.
Can anyone help me solve this issue?
Thanks!
I implemented this using MySQL; you can do the same thing with mssql by passing an empty database parameter and setting the database later, before you create the connection or run a query. You do not need to import the module twice; just update the DB name before creating the connection or running the query.
const express = require('express');
const app = express();
const port = process.env.PORT || 80;
var http = require('http');
var mysql = require('mysql');

var connection = mysql.createConnection({
  host     : 'localhost',
  user     : 'root',
  password : '', // here I am not passing db and db is undefined
});

app.get('/db1', function (req, res) {
  connection.config.database = "task"; // here I am updating the db name before the query
  connection.query('SELECT * FROM tasks', function (error, results, fields) {
    console.log(results);
    res.json(fields);
    connection.end();
  });
});

app.get('/db2', function (req, res) {
  connection.config.database = "cg_taskview"; // db2
  connection.query('SELECT * FROM tasks', function (error, results, fields) {
    if (error)
      console.log(error);
    console.log(results);
    res.json(fields);
  });
  connection.end();
});

var server = http.createServer(app);
server.listen(port, function () {
});
Below is my code for testing:
var sql = require('mssql/msnodesqlv8');

const config = {
  server: 'localhost', database: 'TestDB',
  options: { trustedConnection: true }
};
const config2 = {
  server: 'SomewhereNotExist', database: 'TestDB',
  options: { trustedConnection: true }
};

(async () => {
  try {
    let pool = await sql.connect(config);
    let result = await pool.request().query('select count(1) as cnt from AlarmWithLastStatus');
    console.log('DB1 result:');
    console.dir(result.recordset);
    let pool2 = await sql.connect(config2);
    let result2 = await pool2.request().query('select count(1) as cnt from AlarmWithLastStatus');
    console.log('DB2 result:');
    console.dir(result2.recordset);
  } catch (err) {
    if (err) console.log(err);
  }
})();
The output:
DB1 result: [ { cnt: 12 } ]
DB2 result: [ { cnt: 12 } ]
You can see that the two connections actually point to the same server.
If you change the second query to use a table that does not exist on this server, that will generate the error you got.
I started experiencing a similar problem when a second MSSQL server was added as a data source to the project ... Fortunately, I found a solution in the examples for tediousjs.
Just use the ConnectionPool and don't forget to close the connection:
const config = require('./config');
const sql = require('mssql');

exports.someSqlQuery = async function (sqlQuery) {
  const cPool = new sql.ConnectionPool(config);
  cPool.on('error', err => console.log('---> SQL Error: ', err));
  try {
    await cPool.connect();
    let result = await cPool.request().query(sqlQuery);
    return { data: result };
  } catch (err) {
    return { error: err };
  } finally {
    cPool.close(); // <-- closing the connection at the end is the key
  }
};
If all of your connections are closed like this, you can use connections to different databases on different servers.
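A small usage sketch for that helper (the module path, query, and result handling are illustrative, not part of the original answer):

const { someSqlQuery } = require('./sql-helper'); // hypothetical file containing the function above

(async () => {
  const { data, error } = await someSqlQuery('SELECT TOP 1 * FROM SomeTable');
  if (error) {
    console.error('Query failed:', error);
    return;
  }
  console.log(data.recordset);
})();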

How to write more than 25 items/rows into Table for DynamoDB?

I am quite new to Amazon DynamoDB. I currently have 20000 rows that I need to add to a table. However, based on what I've read, it seems that I can only write up to 25 rows at a time using BatchWriteItem class with 25 WriteRequests. Is it possible to increase this? How can I write more than 25 rows at a time? It is currently taking about 15 minutes to write all 20000 rows. Thank you.
You can only send up to 25 items in a single BatchWriteItem request, but you can send as many BatchWriteItem requests as you want at one time. Assuming you've provisioned enough write throughput, you should be able to speed things up significantly by splitting those 20k rows between multiple threads/processes/hosts and pushing them to the database in parallel.
It's maybe a bit heavyweight for that small of a dataset, but you can use AWS Data Pipeline to ingest data from S3. It basically automates the process of creating a Hadoop cluster to suck down your data from S3 and send it to DynamoDB in a bunch of parallel BatchWriteItem requests.
I was looking for some code to do this with the JavaScript SDK. I couldn't find it, so I put it together myself. I hope this helps someone else!
function multiWrite(table, data, cb) {
  var AWS = require('aws-sdk');
  var db = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

  // Build the batches
  var batches = [];
  var current_batch = [];
  var item_count = 0;
  for (var x in data) {
    // Add the item to the current batch
    item_count++;
    current_batch.push({
      PutRequest: {
        Item: data[x]
      }
    });
    // If we've added 25 items, add the current batch to the batches array
    // and reset it
    if (item_count % 25 == 0) {
      batches.push(current_batch);
      current_batch = [];
    }
  }
  // Add the last batch if it has records and is not equal to 25
  if (current_batch.length > 0 && current_batch.length != 25) batches.push(current_batch);

  // Handler for the database operations
  var completed_requests = 0;
  var errors = false;
  function handler(request) {
    return function (err, data) {
      // Increment the completed requests
      completed_requests++;
      // Set the errors flag
      errors = (errors) ? true : err;
      // Log the error if we got one
      if (err) {
        console.error(JSON.stringify(err, null, 2));
        console.error("Request that caused database error:");
        console.error(JSON.stringify(request, null, 2));
      }
      // Make the callback if we've completed all the requests
      if (completed_requests == batches.length) {
        cb(errors);
      }
    };
  }

  // Make the requests
  var params;
  for (x in batches) {
    // Items go in params.RequestItems.id array
    // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
    params = '{"RequestItems": {"' + table + '": []}}';
    params = JSON.parse(params);
    params.RequestItems[table] = batches[x];
    // Perform the batchWrite operation
    db.batchWrite(params, handler(params));
  }
}
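A quick usage sketch for that multiWrite helper (the table name and items below are placeholders):

// items: array of plain objects shaped like your table's items
var items = [
  { id: '1', name: 'first' },
  { id: '2', name: 'second' }
  // ... up to thousands of rows
];

multiWrite('my-table', items, function (errors) {
  if (errors) console.error('At least one batch failed');
  else console.log('All batches written');
});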
function putInHistory(data, cb) {
  var arrayOfArray25 = _.chunk(data, 25);
  async.every(arrayOfArray25, function (arrayOf25, callback) {
    var params = {
      RequestItems: {
        [TABLES.historyTable]: []
      }
    };
    arrayOf25.forEach(function (item) {
      params.RequestItems[TABLES.historyTable].push({
        PutRequest: {
          Item: item
        }
      });
    });
    docClient.batchWrite(params, function (err, data) {
      if (err) {
        console.log(err);
        callback(err);
      } else {
        console.log(data);
        callback(null, true);
      }
    });
  }, function (err, result) {
    if (err) {
      cb(err);
    } else {
      if (result) {
        cb(null, { allWritten: true });
      } else {
        cb(null, { allWritten: false });
      }
    }
  });
}
You can use lodash to split the array into chunks of 25 items and then use the async library's each/every method to do a batchWrite on each chunk.
Using aws cli and aws-vault, this is what I do.
Let's imagine you have the following file (data.json) with 1000 rows
{ "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}},
... to 1000
and you need to split it into chunk files with 25 rows in each!
I use the following C# code in LINQPad to generate the .sh file and the JSON chunks, so they can be inserted into DynamoDB using the AWS CLI.
void Main()
{
    var sourcePath = @"D:\data\whereYourMainJsonFileIsLocated\";
    var sourceFilePath = @"data.json";
    var awsVaultProfileName = "dev";
    var env = "dev";
    var tableName = "dynamodb-table-name";

    var lines = System.IO.File.ReadAllLines(sourcePath + sourceFilePath);
    var destinationPath = Path.Combine(sourcePath, env);
    var destinationChunkPath = Path.Combine(sourcePath, env, "chunks");
    if (!System.IO.Directory.Exists(destinationChunkPath))
        System.IO.Directory.CreateDirectory(destinationChunkPath);

    System.Text.StringBuilder shString = new System.Text.StringBuilder();
    for (int i = 0; i < lines.Count(); i = i + 25)
    {
        var pagedLines = lines.Skip(i).Take(25).ToList().Distinct().ToList();
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        sb.AppendLine("{");
        sb.AppendLine($" \"{tableName}\": [");
        foreach (var element in pagedLines)
        {
            if (element == pagedLines.Last())
                sb.AppendLine(element.Substring(0, element.Length - 1));
            else
                sb.AppendLine(element);
        }
        sb.AppendLine("]");
        sb.AppendLine("}");
        var fileName = $"chunk{i / 25}.json";
        System.IO.File.WriteAllText(Path.Combine(destinationChunkPath, fileName), sb.ToString(), Encoding.Default);
        shString.AppendLine($@"aws-vault.exe exec {awsVaultProfileName} -- aws dynamodb batch-write-item --request-items file://chunks/{fileName}");
    }
    System.IO.File.WriteAllText(Path.Combine(destinationPath, $"{tableName}-{env}.sh"), shString.ToString(), Encoding.Default);
}
the result would be chunk files as chunk0.json, chunk1.json, etc
{
  "dynamodb-table-name": [
    { "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
    { "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
    { "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}}
  ]
}
and .sh file
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk0.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk1.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk2.json
and finally just run the .sh file and you have all data in your table!
Following on from @Geerek's answer, here is the solution as a Lambda function:
exports.handler = (event, context, callback) => {
  console.log(`EVENT: ${JSON.stringify(event)}`);

  var AWS = require('aws-sdk');
  AWS.config.update({ region: process.env.REGION });
  var docClient = new AWS.DynamoDB.DocumentClient();

  const { data, table, cb } = event;

  // Build the batches
  var batches = [];
  var current_batch = [];
  var item_count = 0;
  for (var i = 0; i < data.length; i++) {
    // Add the item to the current batch
    item_count++;
    current_batch.push({
      PutRequest: {
        Item: data[i],
      },
    });
    // If we've added 25 items, add the current batch to the batches array
    // and reset it
    if (item_count % 25 === 0) {
      batches.push(current_batch);
      current_batch = [];
    }
  }
  // Add the last batch if it has records and is not equal to 25
  if (current_batch.length > 0 && current_batch.length !== 25) {
    batches.push(current_batch);
  }

  // Handler for the database operations
  var completed_requests = 0;
  var errors = false;

  function handler(request) {
    console.log('in the handler: ', request);
    return function (err, data) {
      // Increment the completed requests
      completed_requests++;
      // Set the errors flag
      errors = (errors) ? true : err;
      // Log the error if we got one
      if (err) {
        console.error(JSON.stringify(err, null, 2));
        console.error("Request that caused database error:");
        console.error(JSON.stringify(request, null, 2));
        callback(err);
      } else {
        callback(null, data);
      }
      // Make the callback if we've completed all the requests
      if (completed_requests === batches.length) {
        cb(errors);
      }
    };
  }

  // Make the requests
  var params;
  for (var j = 0; j < batches.length; j++) {
    // Items go in params.RequestItems.id array
    // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
    params = '{"RequestItems": {"' + table + '": []}}';
    params = JSON.parse(params);
    params.RequestItems[table] = batches[j];
    console.log('before db.batchWrite: ', params);
    // Perform the batchWrite operation
    docClient.batchWrite(params, handler(params));
  }
};
I wrote an npm package that should work as a simple drop-in replacement for the batchWrite method; you just need to pass the DynamoDB instance as the first parameter and things should work:
https://www.npmjs.com/package/batch-write-all
Check the example in the project readme file:
// Use the line below instead of: dynamodb.batchWrite(params).promise();
batchWriteAll(dynamodb, params).promise();
const { dynamoClient } = require("./resources/db");
const { v4: uuid } = require("uuid");

const batchWriteLooper = async () => {
  let array = [];
  for (let i = 0; i < 2000; i++) {
    array.push({
      PutRequest: {
        Item: {
          personId: uuid(),
          name: `Person ${i}`,
          age: Math.floor(Math.random() * 100),
          gender: "Male",
          createdAt: new Date(),
          updatedAt: new Date(),
        },
      },
    });
  }

  var perChunk = 20; // items per chunk
  var result = array.reduce((resultArray, item, index) => {
    const chunkIndex = Math.floor(index / perChunk);
    if (!resultArray[chunkIndex]) {
      resultArray[chunkIndex] = []; // start a new chunk
    }
    resultArray[chunkIndex].push(item);
    return resultArray;
  }, []);

  Promise.all(
    result.map(async (chunk) => {
      const params = {
        RequestItems: {
          "persons": chunk,
        },
      };
      return await dynamoClient.batchWrite(params).promise();
    })
  ).then(() => {
    console.log("done");
  });
};

batchWriteLooper();
