Attach files to a record in NetSuite

I am transferring attachments from Zoho to NetSuite, but I am facing problems attaching them to an opportunity or any other record. I have already uploaded the file to the File Cabinet in NetSuite and tried to bind it to the record's notes, but that doesn't work: it only adds the note to the record, with no sign of any file under the Files tab.
Thank you.

You would use the record.attach function. You need the internal id of the file and of the transaction. In SuiteScript 1.0 (using nlapiAttachRecord) it was important to list the file arguments first; the SuiteScript 2.x syntax makes that clearer:
record.attach({
    record: {
        type: 'file',
        id: fileid
    },
    to: {
        type: 'transaction',
        id: transactionid
    }
});
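
For reference, a minimal sketch of the equivalent SuiteScript 1.0 call (the file and opportunity ids are placeholders):

// SS1: the file type/id pair is listed first, then the record to attach it to
nlapiAttachRecord('file', fileId, 'opportunity', opportunityId);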

/**
 * @NApiVersion 2.1
 * @NScriptType MapReduceScript
 * @NModuleScope SameAccount
 */
/**
 * This Map/Reduce script processes and attaches multiple files from the
 * NetSuite File Cabinet, so that it never runs out of governance.
 */
define(['N/record', 'N/query'],
    (record, query) => {

        const getInputData = (getInputDataContext) => {
            try {
                // Query for the transaction ID and other header details of the record.
                let transQuery = "SELECT custrecord_rf_tid as tid, custrecord_rf_fid as fid, id FROM customrecord_rflink where custrecord_rf_comp <> 'T' and custrecord_rf_type = 11";
                let transQueryResult = runSuiteQuery(transQuery);
                if (transQueryResult.length > 0) {
                    log.debug("Count of records left to process--->", transQueryResult.length);
                    return transQueryResult;
                } else { // In case no transaction is left to process.
                    log.debug({title: "No Remaining Transaction!"});
                    return 1;
                }
            } catch (e) {
                log.error({title: "Error inside getInputData.", details: [e.message, e.stack]});
            }
        }

        const map = (mapContext) => {
            try {
                let mapData = JSON.parse(mapContext.value);
                log.debug({title: "mapData after parse", details: mapData});
                let staginRecId = Number(mapData.id);
                let fileId = Number(mapData.fid);
                let billId = Number(mapData.tid);
                let outputVal = attachfile('file', fileId, 'inventoryadjustment', billId);
                let staginRec;
                if (outputVal === true) {
                    // Mark the staging record as complete so it is not processed again.
                    staginRec = record.submitFields({
                        type: 'customrecord_rflink',
                        id: staginRecId,
                        values: {
                            'custrecord_rf_comp': true
                        }
                    });
                    log.debug("record saved with id-->", staginRecId);
                } else {
                    log.debug("record saving failed with id-->", staginRecId);
                }
            } catch (e) {
                log.error({title: "Error in Map", details: [e.message, e.stack]});
            }
        }

        const reduce = (reduceContext) => {
        }

        const summarize = (summarizeContext) => {
            log.debug('Summarize completed');
        }

        function runSuiteQuery(queryString) {
            log.debug("Query", queryString);
            let resultSet = query.runSuiteQL({
                query: queryString
            });
            log.debug("Query wise Data", resultSet.asMappedResults());
            if (resultSet && resultSet.results && resultSet.results.length > 0) {
                return resultSet.asMappedResults();
            } else {
                return [];
            }
        }

        function attachfile(recType, recId, recTypeTo, recIdTo) {
            record.attach({
                record: {
                    type: recType,
                    id: recId
                },
                to: {
                    type: recTypeTo,
                    id: recIdTo
                }
            });
            return true;
        }

        return {getInputData, map, reduce, summarize};
    });


discordjs Embed won't show up

OK, so I'm trying to make a push notification for my Discord server. I found this script online, but it will not post the embed.
This is my monitor code:
TwitchMonitor.onChannelLiveUpdate((streamData) => {
    const isLive = streamData.type === "live";

    // Refresh channel list
    try {
        syncServerList(false);
    } catch (e) { }

    // Update activity
    StreamActivity.setChannelOnline(streamData);

    // Generate message
    const msgFormatted = `${streamData.user_name} is nu live op twitch <:bday:967848861613826108> kom je ook?`;
    const msgEmbed = LiveEmbed.createForStream(streamData);

    // Broadcast to all target channels
    let anySent = false;
    for (let i = 0; i < targetChannels.length; i++) {
        const discordChannel = targetChannels[i];
        const liveMsgDiscrim = `${discordChannel.guild.id}_${discordChannel.name}_${streamData.id}`;
        if (discordChannel) {
            try {
                // Either send a new message, or update an old one
                let existingMsgId = messageHistory[liveMsgDiscrim] || null;
                if (existingMsgId) {
                    // Fetch existing message
                    discordChannel.messages.fetch(existingMsgId)
                        .then((existingMsg) => {
                            existingMsg.edit(msgFormatted, {
                                embed: msgEmbed
                            }).then((message) => {
                                // Clean up entry if no longer live
                                if (!isLive) {
                                    delete messageHistory[liveMsgDiscrim];
                                    liveMessageDb.put('history', messageHistory);
                                }
                            });
                        })
                        .catch((e) => {
                            // Unable to retrieve message object for editing
                            if (e.message === "Unknown Message") {
                                // Specific error: the message does not exist, most likely deleted.
                                delete messageHistory[liveMsgDiscrim];
                                liveMessageDb.put('history', messageHistory);
                                // This will cause the message to be posted as new in the next update if needed.
                            }
                        });
                } else {
                    // Sending a new message
                    if (!isLive) {
                        // We do not post "new" notifications for channels going/being offline
                        continue;
                    }
                    // Expand the message with an @mention for "here" or "everyone"
                    // We don't do this in updates because it causes some people to get spammed
                    let mentionMode = (config.discord_mentions && config.discord_mentions[streamData.user_name.toLowerCase()]) || null;
                    if (mentionMode) {
                        mentionMode = mentionMode.toLowerCase();
                        if (mentionMode === "Nu-Live") {
                            // Reserved @ keywords for discord that can be mentioned directly as text
                            mentionMode = `@${mentionMode}`;
                        } else {
                            // Most likely a role that needs to be translated to <@&id> format
                            let roleData = discordChannel.guild.roles.cache.find((role) => {
                                return (role.name.toLowerCase() === mentionMode);
                            });
                            if (roleData) {
                                mentionMode = `<@&${roleData.id}>`;
                            } else {
                                console.log('[Discord]', `Cannot mention role: ${mentionMode}`,
                                    `(does not exist on server ${discordChannel.guild.name})`);
                                mentionMode = null;
                            }
                        }
                    }
                    let msgToSend = msgFormatted;
                    if (mentionMode) {
                        msgToSend = msgFormatted + ` ${mentionMode}`;
                    }
                    let msgOptions = {
                        embed: msgEmbed
                    };
                    discordChannel.send(msgToSend, msgOptions)
                        .then((message) => {
                            console.log('[Discord]', `Sent announce msg to #${discordChannel.name} on ${discordChannel.guild.name}`);
                            messageHistory[liveMsgDiscrim] = message.id;
                            liveMessageDb.put('history', messageHistory);
                        })
                        .catch((err) => {
                            console.log('[Discord]', `Could not send announce msg to #${discordChannel.name} on ${discordChannel.guild.name}:`, err.message);
                        });
                }
                anySent = true;
            } catch (e) {
                console.warn('[Discord]', 'Message send problem:', e);
            }
        }
    }
    liveMessageDb.put('history', messageHistory);
    return anySent;
});
This is the embed code:
const Discord = require('discord.js');
const moment = require('moment');
const humanizeDuration = require("humanize-duration");
const config = require('../data/config.json');

class LiveEmbed {
    static createForStream(streamData) {
        const isLive = streamData.type === "live";
        const allowBoxArt = config.twitch_use_boxart;

        let msgEmbed = new Discord.MessageEmbed();
        msgEmbed.setColor(isLive ? "RED" : "BLACK");
        msgEmbed.setURL(`https://twitch.tv/${(streamData.login || streamData.user_name).toLowerCase()}`);

        // Thumbnail
        let thumbUrl = streamData.profile_image_url;
        if (allowBoxArt && streamData.game && streamData.game.box_art_url) {
            thumbUrl = streamData.game.box_art_url;
            thumbUrl = thumbUrl.replace("{width}", "288");
            thumbUrl = thumbUrl.replace("{height}", "384");
        }
        msgEmbed.setThumbnail(thumbUrl);

        if (isLive) {
            // Title
            msgEmbed.setTitle(`:red_circle: **${streamData.user_name} is live op Twitch!**`);
            msgEmbed.addField("Title", streamData.title, false);
        } else {
            msgEmbed.setTitle(`:white_circle: ${streamData.user_name} was live op Twitch.`);
            msgEmbed.setDescription('The stream has now ended.');
            msgEmbed.addField("Title", streamData.title, true);
        }

        // Add game
        if (streamData.game) {
            msgEmbed.addField("Game", streamData.game.name, false);
        }

        if (isLive) {
            // Add status
            msgEmbed.addField("Status", isLive ? `Live with ${streamData.viewer_count} viewers` : 'Stream has ended', true);

            // Set main image (stream preview)
            let imageUrl = streamData.thumbnail_url;
            imageUrl = imageUrl.replace("{width}", "1280");
            imageUrl = imageUrl.replace("{height}", "720");
            let thumbnailBuster = (Date.now() / 1000).toFixed(0);
            imageUrl += `?t=${thumbnailBuster}`;
            msgEmbed.setImage(imageUrl);

            // Add uptime
            let now = moment();
            let startedAt = moment(streamData.started_at);
            msgEmbed.addField("Uptime", humanizeDuration(now - startedAt, {
                delimiter: ", ",
                largest: 2,
                round: true,
                units: ["y", "mo", "w", "d", "h", "m"]
            }), true);
        }

        return msgEmbed;
    }
}

module.exports = LiveEmbed;
But it won't post the embed, only the message; as you can see, it updates the message as well.
I've been stuck on this for four days now. Can someone help?
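
One thing worth checking, assuming you are on discord.js v13 or later: send and edit no longer accept a (content, options) pair with an embed option; they take a single options object whose embeds property is an array. A minimal sketch of both calls under that assumption, using the names from the code above:

// discord.js v13+: pass embeds as an array on a single options object
discordChannel.send({ content: msgToSend, embeds: [msgEmbed] });
existingMsg.edit({ content: msgFormatted, embeds: [msgEmbed] });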

Setting specific variables based on what value is set in cypress environment variables

I have a custom command registered with Cypress.Commands.add("createCustomer").
I have an environment variable that specifies in which country this customer will be created.
The problem is that within the createCustomer function I would like to set other values based on the country the customer is actually being created in. Specific countries have different currencies, so instead of having to change all the other environment variables, I would like them to be derived from a check like this:
if (Cypress.env('tenant') === 'company_denmark') {
    Cypress.env('invoiceCurrency', 'danish-crown');
    Cypress.env('billcycle', 'danish-billcycle-1');
}
Should I place this within the createCustomer function, or can I place this if-statement in cypress.env?
This is the createCustomer function:
Cypress.Commands.add("createCustomer", (sendEmail, billingIdName, invoiceCurrency) => {
    /*
    if (Cypress.env('tenant') === 'company_denmark') {
        Cypress.env('invoiceCurrency', 'danish-crown');
        Cypress.env('billcycle', 'danish-billcycle-1');
    }
    */
    const todaysDate = Cypress.moment().format('DD MMM, YYYY')
    cy.contains('Customers', {timeout: 15000}).click().then(($div) => {
        if ($div.hasClass('is-expanded')) {
            $div.click().first()
        }
    })
    cy.contains('Create Customer', {timeout: 15000}).click()
    cy.get('body').then(($body) => {
        if ($body.find('div[r6-popover="r6-create-customer-popover"]').length > 0) {
            cy.get('[r6-permission-context="RETAIL"] > a').click({ force: true })
        }
    })
    cy.wait(1500)
    cy.get('.r6loader', {timeout: 30000}).should('not.be.visible')
    // skip to Customer Details
    cy.r6WizardNext()
    // fill in data
    cy.get('#business-type').select('Individual')
    cy.get('#industry-type').select('Professional Services')
    cy.get('#trading-name').type('Testbolaget Cypress ' + todaysDate)
    cy.get('#business-number').type('SE999999999901')
    cy.get('select[name=customer-title]').select('Mr')
    cy.get('input[name=contact-lastName]').type('Gunnar')
    cy.get('input[name=contact-firstName]').type('Svensson')
    cy.get('input[name=contact-middleName]').type('testarn')
    cy.get('input[name=streetName]').type('Testgatan 21')
    cy.get('input[name=postCode]').type('123 11')
    cy.get('input[name=suburb]').type('Staden')
    cy.get('select[name=country]').select('Sweden')
    cy.get('select[name=preferred-contact-method]').select('Email')
    cy.get('input[name=phoneNumberDayTime]').type('0822334455')
    cy.get('input[name=phoneMobile]').type('467223344')
    cy.get('input[name=email]').type('test@mail.com')
    cy.get('input[name=telia-Id]').type('123456')
    cy.get('select[name=company_code] > option')
        .eq(1)
        .then(option => cy.get('select[name=company_code]').select(option.val()))
    cy.get('select[name=profit-Center-Telia] > option')
        .eq(1)
        .then(option => cy.get('select[name=profit-Center-Telia]').select(option.val()))
    cy.r6WizardNext()
    if (invoiceCurrency) {
        cy.get('#invoiceCurrency').select(invoiceCurrency)
    } else {
        cy.get('select[name=invoiceCurrency] > option')
            .eq(1)
            .then(option => cy.get('select[name=invoiceCurrency]').select(option.val()))
    }
    cy.get('select[name=billCycleKey] > option')
        .eq(1)
        .then(option => cy.get('select[name=billCycleKey]').select(option.val()))
    cy.get('input[name=email-to]').type('test@mail.com')
    cy.get('select[name=email-type]').select('Telia Email Type')
    cy.r6WizardNext()
    cy.get('h4.modal-title:contains("Send summary email")', { timeout: 30000 })
    if (sendEmail) {
        cy.fillAndSendSummaryEmail()
    } else {
        cy.get('div.r6modal-buttons > button:contains("Cancel")').click()
    }
    if (billingIdName) {
        cy.get('#r6-customerheader-accountnumber-label').then(($div) => {
            cy.wrap($div.text().trim()).as(billingIdName)
        })
    } else {
        cy.get('#r6-customerheader-accountnumber-label').then(($div) => {
            cy.wrap($div.text().trim()).as('billingAccountId')
        })
    }
})
I don't think it is necessary to set the invoice currency and bill cycle as environment variables. At the beginning of your command you can just make the values local constants:
const tenant = Cypress.env('tenant');
const invoiceCurrency = getTenantCurrency(tenant);
const billcycle = getTenantBillcycle(tenant);
This code will run synchronously and the respective values will be available throughout the rest of the command code. getTenantCurrency() and getTenantBillcycle() can be implemented anywhere else.
function getTenantCurrency(tenant) {
    switch (tenant) {
        case 'company_denmark': return 'danish-crown';
        // ...
    }
}
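
One way to implement both helpers in one place is a lookup object; a minimal sketch, using the Danish values from the question and leaving other tenants as placeholders:

const TENANT_DEFAULTS = {
    company_denmark: { currency: 'danish-crown', billcycle: 'danish-billcycle-1' },
    // add other tenants here ...
};

function getTenantCurrency(tenant) {
    return (TENANT_DEFAULTS[tenant] || {}).currency;
}

function getTenantBillcycle(tenant) {
    return (TENANT_DEFAULTS[tenant] || {}).billcycle;
}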

Validation on export data for one2many field column in Odoo 13

I want to show a validation error when the user exports records with state = 'draft' (in a one2many field). I have written code for this and it works fine, but when I apply it to the one2many table I am unable to get a validation message.
My code is below:
class DailyTransaction(models.Model):
    _name = 'daily.transaction'
    _rec_name = 'batch_id'

    date = fields.Date()
    batch_id = fields.Char()
    daily_transaction = fields.One2many('transaction.log', 'daily_trans_log', string='Daily Transaction')


class Transaction_log(models.Model):
    _name = 'transaction.log'
    _rec_name = 'daily_trans_log'

    daily_trans_log = fields.Many2one('daily.transaction')
    log_status = fields.Selection([('Draft', 'Draft'), ('Approved', 'Approved'), ('Confirmed', 'Confirmed')],
                                  default='Draft', string='Log Status')
odoo.define("transaction_log.export_log", function (require) {
    "use strict";
    var listController = require("web.ListController");
    var dialog = require("web.Dialog");
    listController.include({
        /**
         * Opens the Export Dialog
         *
         * @private
         */
        _onExportData: function () {
            var self = this;
            var do_export = true;
            // Avoid calling `read` when the `log_status` field is not available
            if (self.initialState.fields.hasOwnProperty('log_status')) {
                self._rpc({
                    model: self.modelName,
                    method: 'read',
                    args: [self.getSelectedIds(), ['log_status']],
                }).then(function (result) {
                    // Check if we have at least one draft record
                    for (var index in result) {
                        var item = result[index];
                        if (item.log_status === 'Draft') {
                            do_export = false;
                            break;
                        }
                    }
                    if (do_export) {
                        self._getExportDialogWidget().open();
                    } else {
                        dialog.alert(self, "You can't export draft stage data!", {});
                    }
                });
            } else {
                this._getExportDialogWidget().open();
            }
        },
    });
});
When I export records from 'transaction.log' with 'Draft' log_status, it works and shows the validation message. But I also want to show this validation when exporting from 'daily.transaction'.
Thanks in advance.
You need to add a second condition and read the records of the related model to check whether any of them is in the Draft state.
else if (self.initialState.fields.hasOwnProperty('daily_transaction')) {
    self._rpc({
        model: 'transaction.log',
        method: 'search_read',
        args: [[['daily_trans_log', 'in', self.getSelectedIds()]], ['log_status']],
    }).then(function (result) {
        // Check if we have at least one draft record
        for (var index in result) {
            var item = result[index];
            if (item.log_status === 'Draft') {
                do_export = false;
                break;
            }
        }
        if (do_export) {
            self._getExportDialogWidget().open();
        } else {
            dialog.alert(self, "You can't export draft stage data!", {});
        }
    });
}
The code inside then is the same as yours; I just put together a quick example.

NodeJS MSSQL multiple queries promised

I am having a lot of problems using the mssql npm library.
Here is my db class:
"use strict"
const mssql = require('mssql');
const moment = require("moment");
let pool_historian = null;
let connexion_historian = null;
function mapBarChart(array) {
return new Promise((resolve, reject) => {
let result = {}
result.old = []
result.this = []
let currentYear = null;
for (let i = 0; i < array.length; i++) {
if (parseInt(moment().format("YYYY")) !== array[i].Annees) {
result.old.push(array[i]);
} else {
result.this.push(array[i]);
}
}
resolve(result);
})
};
class Historian {
constructor(data) {
pool_historian = new mssql.ConnectionPool({
server: data.host,
user: data.username,
password: data.password,
database: data.historian_db,
pool: {
max: 50,
min: 1
}
});
}
getBarChart(sensor, from, to) {
return pool_historian.connect().then(connector => {
return connector.query`SELECT Annees=YEAR(DateTime),Mois=MONTH(DateTime), Valeur=ROUND(sum(AnalogHistory.Value),2) FROM AnalogHistory WHERE AnalogHistory.TagName IN (${sensor}) AND Quality = 0 AND wwVersion = 'Latest' AND wwRetrievalMode = 'Full' AND DateTime >= ${from} AND DateTime <= ${to} AND AnalogHistory.Value > 0 GROUP BY YEAR(AnalogHistory.DateTime),MONTH(AnalogHistory.DateTime) ORDER BY Annees, Mois`.then(result => {
connector.close();
return mapBarChart(result.recordset).then(result => { return result });
//return result.recordset;
}).catch(err => {
return err;
})
})
}
getLineChart() {
return pool_historian.connect().then(connector => {
let variable = "A_000000000000000000000000000045.PV";
return connector.query`SELECT Annees=YEAR(DateTime),Mois=MONTH(DateTime),day=DAY(DateTime), Valeur=ROUND(sum(AnalogHistory.Value),2) FROM AnalogHistory WHERE AnalogHistory.TagName IN (${variable}) AND Quality = 0 AND wwVersion = 'Latest' AND wwRetrievalMode = 'Cyclic' AND DateTime >= '20160101 00:00:00.000' AND DateTime <= '20170809 00:00:00.000' AND AnalogHistory.Value > 0 GROUP BY YEAR(AnalogHistory.DateTime),MONTH(AnalogHistory.DateTime), Day(AnalogHistory.DateTime)ORDER BY Annees, Mois`.then(result => {
connector.close();
return result.recordset;
}).catch(err => {
return err;
})
})
}
close() {
pool_historian.close()
}
}
This class is used in this "business class":
const Historian = require(`${__dirname}/historian-query`)
const Fdedb = require(`${__dirname}/fdedb-query`)
const moment = require('moment');
moment.locale("fr-FR");

class Graph_Tasks {
    constructor() {
        this.historian = new Historian({ host: "192.168.1.16", username: "******", password: "w***", historian_db: "R******e" })
        this.fdedb = new Fdedb({ host: "192.168.1.16", username: "*****", password: "*****", fde_db: "S*****" })
    }

    createGraphForBuilding(code) {
        return new Promise((resolve, reject) => {
            this.fdedb.getList(code).then(list => {
                console.log(list)
                let datas = [];
                // For each item on the list, perform these 2 queries
                Promise.all([
                    this.historian.getLineChart("A_000000000000000000000000000045.PV", moment().subtract(1, "years").startOf("year").format(), moment().format()),
                    this.historian.getBarChart("A_000000000000000000000000000045.PV", moment().subtract(1, "years").startOf("year").format(), moment().format())
                ]).then(results => {
                    let datas = []
                    datas = { "lineChart": null, "barChart": results[0] };
                    console.log(datas)
                    res.render('index', { title: 'WebGraph', message: 'Yo Yo', datas });
                })
                console.log(datas)
                resolve(datas)
            }).catch(console.log);
        });
    }
}

module.exports = Graph_Tasks;
As you can see, what I'm trying to do is perform simultaneous database requests. As I read in the documentation, the connection pool should let me do this properly, so when the program reaches Promise.all I expected the two requests to be launched simultaneously.
But I get an error:
An exception occurred: Error
Promise Rejection (ConnectionError: Already connecting to database! Call close before connecting to different database.)
ConnectionError: Already connecting to database! Call close before connecting to different database.
at ConnectionError (d:\repositories\fde\node_modules\mssql\lib\base.js:1428:7)
at ConnectionPool._connect (d:\repositories\fde\node_modules\mssql\lib\base.js:235:23)
at EventEmitter.connect.PromiseLibrary (d:\repositories\fde\node_modules\mssql\lib\base.js:217:19)
at ConnectionPool.connect (d:\repositories\fde\node_modules\mssql\lib\base.js:216:12)
at Historian.getBarChart (d:\repositories\fde\class\historian-query.js:39:31)
at __dirname.createGraphForBuilding.Promise.fdedb.getList.then.list (d:\repositories\fde\class\graph_tasks.js:21:188)
at process._tickCallback (internal/process/next_tick.js:109:7)
So my question is: how do I adapt the code so that I can perform several queries at the same time (the Promise.all for each of my list items)?
The issue is that you can't open multiple connection pools against the same server like this (I assume Fdedb opens another connection, since you didn't include its code). If you were pulling that data from two different servers, then opening two connection pools would be appropriate; I've run into that use case before. But it looks like your two databases are on the same server, so it's better to open just one connection pool and pass it to your objects to make the queries. You can touch multiple databases on the same host using plain old SQL; see: How do I query tables located in different database?
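
For example, a minimal sketch of sharing one pool (the config values are placeholders; this assumes the promise API of mssql v4+):

const mssql = require('mssql');

// Create the pool once and share the connection promise: concurrent callers
// all await the same connect() instead of racing to open the pool.
const poolPromise = new mssql.ConnectionPool({
    server: "192.168.1.16",
    user: "user",
    password: "password",
    database: "Historian"
}).connect();

function getBarChart(sensor, from, to) {
    // Reuse the shared pool; do not close it between queries.
    return poolPromise.then(pool =>
        pool.query`SELECT TagName, Value FROM AnalogHistory WHERE TagName = ${sensor} AND DateTime >= ${from} AND DateTime <= ${to}`
    ).then(result => result.recordset);
}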

How to write more than 25 items/rows into Table for DynamoDB?

I am quite new to Amazon DynamoDB. I currently have 20000 rows that I need to add to a table. However, based on what I've read, it seems that I can only write up to 25 rows at a time using BatchWriteItem class with 25 WriteRequests. Is it possible to increase this? How can I write more than 25 rows at a time? It is currently taking about 15 minutes to write all 20000 rows. Thank you.
You can only send up to 25 items in a single BatchWriteItem request, but you can send as many BatchWriteItem requests as you want at one time. Assuming you've provisioned enough write throughput, you should be able to speed things up significantly by splitting those 20k rows between multiple threads/processes/hosts and pushing them to the database in parallel.
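
Note that even accepted batches can come back partially throttled: the response's UnprocessedItems must be re-sent. A minimal sketch of that retry loop with the AWS SDK v2 DocumentClient (table name and region are placeholders):

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-east-1' });

// Writes one batch of up to 25 items, re-sending whatever DynamoDB
// reports back as unprocessed until nothing is left.
async function batchWriteWithRetry(table, items) {
    let requestItems = { [table]: items.map(Item => ({ PutRequest: { Item } })) };
    while (requestItems && Object.keys(requestItems).length > 0) {
        const res = await docClient.batchWrite({ RequestItems: requestItems }).promise();
        requestItems = res.UnprocessedItems;
    }
}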
It may be a bit heavyweight for that small a dataset, but you can use AWS Data Pipeline to ingest data from S3. It basically automates the process of creating a Hadoop cluster to pull your data down from S3 and send it to DynamoDB in a bunch of parallel BatchWriteItem requests.
I was looking for some code to do this with the JavaScript SDK. I couldn't find it, so I put it together myself. I hope this helps someone else!
function multiWrite(table, data, cb) {
    var AWS = require('aws-sdk');
    var db = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});

    // Build the batches
    var batches = [];
    var current_batch = [];
    var item_count = 0;
    for (var x in data) {
        // Add the item to the current batch
        item_count++;
        current_batch.push({
            PutRequest: {
                Item: data[x]
            }
        });
        // If we've added 25 items, add the current batch to the batches array
        // and reset it
        if (item_count % 25 == 0) {
            batches.push(current_batch);
            current_batch = [];
        }
    }
    // Add the last batch if it has records and is not equal to 25
    if (current_batch.length > 0 && current_batch.length != 25) batches.push(current_batch);

    // Handler for the database operations
    var completed_requests = 0;
    var errors = false;
    function handler(request) {
        return function (err, data) {
            // Increment the completed requests
            completed_requests++;
            // Set the errors flag
            errors = (errors) ? true : err;
            // Log the error if we got one
            if (err) {
                console.error(JSON.stringify(err, null, 2));
                console.error("Request that caused database error:");
                console.error(JSON.stringify(request, null, 2));
            }
            // Make the callback if we've completed all the requests
            if (completed_requests == batches.length) {
                cb(errors);
            }
        }
    }

    // Make the requests
    var params;
    for (x in batches) {
        // Items go in params.RequestItems.id array
        // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
        params = '{"RequestItems": {"' + table + '": []}}';
        params = JSON.parse(params);
        params.RequestItems[table] = batches[x];
        // Perform the batchWrite operation
        db.batchWrite(params, handler(params));
    }
}
// Assumes lodash (_), the async library, a configured DocumentClient (docClient)
// and a TABLES constant are available in this module.
const _ = require('lodash');
const async = require('async');

function putInHistory(data, cb) {
    var arrayOfArray25 = _.chunk(data, 25);
    async.every(arrayOfArray25, function (arrayOf25, callback) {
        var params = {
            RequestItems: {
                [TABLES.historyTable]: []
            }
        };
        arrayOf25.forEach(function (item) {
            params.RequestItems[TABLES.historyTable].push({
                PutRequest: {
                    Item: item
                }
            })
        });
        docClient.batchWrite(params, function (err, data) {
            if (err) {
                console.log(err);
                callback(err);
            } else {
                console.log(data);
                callback(null, true);
            };
        });
    }, function (err, result) {
        if (err) {
            cb(err);
        } else {
            if (result) {
                cb(null, {allWritten: true});
            } else {
                cb(null, {allWritten: false});
            }
        }
    });
}
You can use lodash to split the array into chunks of 25 items and then use the async library's each/every method to do a batchWrite on each chunk of 25 elements.
Using the AWS CLI and aws-vault, this is what I do.
Let's imagine you have the following file (data.json) with 1000 rows:
{ "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
{ "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}},
... to 1000
and you need to split it into chunk files with 25 rows each.
I use the following C# code in LINQPad to generate the .sh file and the JSON chunks so they can be inserted into DynamoDB using the AWS CLI:
void Main()
{
    var sourcePath = @"D:\data\whereYourMainJsonFileIsLocated\";
    var sourceFilePath = @"data.json";
    var awsVaultProfileName = "dev";
    var env = "dev";
    var tableName = "dynamodb-table-name";

    var lines = System.IO.File.ReadAllLines(sourcePath + sourceFilePath);
    var destinationPath = Path.Combine(sourcePath, env);
    var destinationChunkPath = Path.Combine(sourcePath, env, "chunks");
    if (!System.IO.Directory.Exists(destinationChunkPath))
        System.IO.Directory.CreateDirectory(destinationChunkPath);

    System.Text.StringBuilder shString = new System.Text.StringBuilder();
    for (int i = 0; i < lines.Count(); i = i + 25)
    {
        var pagedLines = lines.Skip(i).Take(25).ToList().Distinct().ToList();
        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        sb.AppendLine("{");
        sb.AppendLine($" \"{tableName}\": [");
        foreach (var element in pagedLines)
        {
            if (element == pagedLines.Last())
                sb.AppendLine(element.Substring(0, element.Length - 1));
            else
                sb.AppendLine(element);
        }
        sb.AppendLine("]");
        sb.AppendLine("}");
        var fileName = $"chunk{i / 25}.json";
        System.IO.File.WriteAllText(Path.Combine(destinationChunkPath, fileName), sb.ToString(), Encoding.Default);
        shString.AppendLine($@"aws-vault.exe exec {awsVaultProfileName} -- aws dynamodb batch-write-item --request-items file://chunks/{fileName}");
    }
    System.IO.File.WriteAllText(Path.Combine(destinationPath, $"{tableName}-{env}.sh"), shString.ToString(), Encoding.Default);
}
The result would be chunk files such as chunk0.json, chunk1.json, etc.:
{
    "dynamodb-table-name": [
        { "PutRequest": { "Item": { "PKey": { "S": "1" }, "SKey": { "S": "A" }}}},
        { "PutRequest": { "Item": { "PKey": { "S": "2" }, "SKey": { "S": "B" }}}},
        { "PutRequest": { "Item": { "PKey": { "S": "3" }, "SKey": { "S": "C" }}}}
    ]
}
and the .sh file:
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk0.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk1.json
aws-vault.exe exec dev -- aws dynamodb batch-write-item --request-items file://chunks/chunk2.json
and finally just run the .sh file and you have all data in your table!
Building on the answer from @Geerek, here is the solution with a Lambda function:
exports.handler = (event, context, callback) => {
    console.log(`EVENT: ${JSON.stringify(event)}`);
    var AWS = require('aws-sdk');
    AWS.config.update({ region: process.env.REGION })
    var docClient = new AWS.DynamoDB.DocumentClient();
    const {data, table, cb} = event

    // Build the batches
    var batches = [];
    var current_batch = [];
    var item_count = 0;
    for (var i = 0; i < data.length; i++) {
        // Add the item to the current batch
        item_count++
        current_batch.push({
            PutRequest: {
                Item: data[i],
            },
        })
        // If we've added 25 items, add the current batch to the batches array
        // and reset it
        if (item_count % 25 === 0) {
            batches.push(current_batch)
            current_batch = []
        }
    }
    // Add the last batch if it has records and is not equal to 25
    if (current_batch.length > 0 && current_batch.length !== 25) {
        batches.push(current_batch)
    }

    // Handler for the database operations
    var completed_requests = 0
    var errors = false

    function handler (request) {
        console.log('in the handler: ', request)
        return function (err, data) {
            // Increment the completed requests
            completed_requests++;
            // Set the errors flag
            errors = (errors) ? true : err;
            // Log the error if we got one
            if (err) {
                console.error(JSON.stringify(err, null, 2));
                console.error("Request that caused database error:");
                console.error(JSON.stringify(request, null, 2));
                callback(err);
            } else {
                callback(null, data);
            }
            // Make the callback if we've completed all the requests
            if (completed_requests === batches.length) {
                cb(errors);
            }
        }
    }

    // Make the requests
    var params;
    for (var j = 0; j < batches.length; j++) {
        // Items go in params.RequestItems.id array
        // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
        params = '{"RequestItems": {"' + table + '": []}}'
        params = JSON.parse(params)
        params.RequestItems[table] = batches[j]
        console.log('before db.batchWrite: ', params)
        // Perform the batchWrite operation
        docClient.batchWrite(params, handler(params))
    }
};
I wrote an npm package that should work as a simple drop-in replacement for the batchWrite method; you just need to pass the DynamoDB instance as the first parameter and things should work:
https://www.npmjs.com/package/batch-write-all
Check the example in the project readme file:
// Use below instead of this: dynamodb.batchWrite(params).promise();
batchWriteAll(dynamodb, params).promise();
const { dynamoClient } = require("./resources/db");
const { v4: uuid } = require("uuid");

const batchWriteLooper = async () => {
    let array = [];
    for (let i = 0; i < 2000; i++) {
        array.push({
            PutRequest: {
                Item: {
                    personId: uuid(),
                    name: `Person ${i}`,
                    age: Math.floor(Math.random() * 100),
                    gender: "Male",
                    createdAt: new Date(),
                    updatedAt: new Date(),
                },
            },
        });
    }

    var perChunk = 20; // items per chunk
    var result = array.reduce((resultArray, item, index) => {
        const chunkIndex = Math.floor(index / perChunk);
        if (!resultArray[chunkIndex]) {
            resultArray[chunkIndex] = []; // start a new chunk
        }
        resultArray[chunkIndex].push(item);
        return resultArray;
    }, []);

    Promise.all(
        result.map(async (chunk) => {
            const params = {
                RequestItems: {
                    "persons": chunk,
                },
            };
            return await dynamoClient.batchWrite(params).promise();
        })
    ).then(() => {
        console.log("done");
    });
};

batchWriteLooper();
