Bulk insert data into an mssql table using node-mssql - sql-server

I am using node-mssql.
My query file is as below:
BEGIN TRANSACTION
DECLARE @status NVARCHAR(30);
SET @status = 'create';
DECLARE @i UNIQUEIDENTIFIER;
SET @i = NEWID();
DECLARE @t DATETIME2;
SET @t = SYSUTCDATETIME();
IF NOT EXISTS(
    SELECT * FROM user WHERE email = @email AND company_id = @company_id
) BEGIN
    SET @i = NEWID();
    INSERT INTO user (company_id, id, email, password) VALUES (@company_id, @i, @email, @password);
    INSERT INTO user_transaction (id, date, type) VALUES (@i, @t, @status);
    SELECT @i AS 'id', @email AS 'email';
END ELSE BEGIN
    SELECT NULL AS 'id', @email AS 'email';
END
COMMIT TRANSACTION
And my createuser query in the query.js file is:
datastore.getQueryFromSqlFile('create_user', (err: any, query: string) => {
    if (err) {
        done(err);
    } else {
        var request = new sql.Request(connectionOrTransaction);
        request.input('email', sql.NVarChar(200), email);
        request.input('password', sql.NVarChar(200), some_password);
        request.input('company_id', sql.UniqueIdentifier, company_id);
        request.query(query, function (err, data) {});
    }
});
Now I need to modify this to bulk-insert user data imported from a CSV file (>20000 entries).
I was thinking of doing something like:
async.mapSeries(Object.keys(users), function (item, callback) {
    query.createuser(users[item].email, users[item].company_id, function (err, data) {
        callback(err, err ? 'Error message: ' + data : data);
    });
}, function (err, results) {
});
But this is not efficient, as I get a connection timeout. Increasing connectionTimeout or requestTimeout in the config file doesn't help much.
How can I make my query faster for bulk inserting around 20000-40000 entries per attempt?

To me this looks like a job for a prepared statement.
var ps = new sql.PreparedStatement();
// input types should match the user table from the question
ps.input('email', sql.NVarChar(200));
ps.input('password', sql.NVarChar(200));
ps.input('company_id', sql.UniqueIdentifier);
ps.prepare(" ... your sql ... ", function (err) {
    // ... error checks
    // users must be an array of users
    async.mapSeries(users, function (user, next) {
        ps.execute({ email: user.email, password: user.password, company_id: user.company_id }, next);
    }, function (err) {
        // ... error checks
        ps.unprepare(function (err) {
            // ... error checks
            // done!
        });
    });
});
Every execute is sent as a single request, so you should not be timed out by requestTimeout. connectionTimeout only affects the connecting phase; once you're connected to the database, only requestTimeout matters.
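If prepared statements are still too slow at 20000-40000 rows, the mssql package also has a bulk load API (sql.Table together with request.bulk). A minimal sketch, assuming an open connection and a users array already parsed from the CSV (table and column names are taken from the question; adjust to your schema):
var sql = require('mssql');

function bulkInsertUsers(users, callback) {
    // Describe the target table; create = false because the table already exists.
    var table = new sql.Table('user');
    table.create = false;
    table.columns.add('company_id', sql.UniqueIdentifier, { nullable: false });
    table.columns.add('id', sql.UniqueIdentifier, { nullable: false });
    table.columns.add('email', sql.NVarChar(200), { nullable: false });
    table.columns.add('password', sql.NVarChar(200), { nullable: false });

    users.forEach(function (user) {
        table.rows.add(user.company_id, user.id, user.email, user.password);
    });

    // Sends all rows in one bulk operation instead of one INSERT per user.
    var request = new sql.Request();
    request.bulk(table, callback);
}
Note that this skips the duplicate-email check and the user_transaction insert from the original query, so those would have to be handled separately (for example by de-duplicating beforehand or by staging into a temporary table first).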

Related

Passing SP parameters in the body at runtime in Snowflake

I have created a stored procedure that accepts 2 parameters, which help determine the load type; based on that, I update the control table to change dates.
When I call the SP, it fails with:
Unexpected identifier in USP_NIGHTLYJOBRESETDAYS at
' ResetDaysDateRange = SELECT (`DATEADD(day,-?,DATEADD(day,DATEDIFF(day, '1900-01-01'::date, CURRENT_TIMESTAMP::date),'1900-01-01'::date))`,binds:[RESETDAYS]);' position 33
Here is the stored procedure code
CREATE OR REPLACE PROCEDURE etl.usp_NightlyJobResetDays (NIGHTLYLOAD VARCHAR(10), RESETDAYS VARCHAR(10))
RETURNS VARCHAR
LANGUAGE JAVASCRIPT
EXECUTE AS CALLER
AS
$$
var sql_command =
`BEGIN
let ResetDaysDateRange;
//Capturing date based on value
ResetDaysDateRange = SELECT (`DATEADD(day,-?,DATEADD(day,DATEDIFF(day, '1900-01-01'::date, CURRENT_TIMESTAMP::date),'1900-01-01'::date))`,binds:[RESETDAYS]);
//checking load type
if (NIGHTLYLOAD ='Yes')
{
EXEC(`Update Reporting.ReportingLoadDetail
SET MaxLoadDate = ?
WHERE IsNightlyLoadImpacted <> 'Yes'`,[ResetDaysDateRange]);
EXEC(`UPDATE Reporting.ReportingLoadDetail
SET MaxLoadDate = CASE WHEN ? > LastInitialLoadDate THEN ?
ELSE LastInitialLoadDate
END
WHERE IsNightlyLoadImpacted = 'Yes'`,[ResetDaysDateRange,ResetDaysDateRange]);
}
//reset restartabilityStatus to completed if last incremental load got failed
EXEC(`UPDATE etl.APILastLoadDetail set RestartabilityStatus = 'Completed'`);
END`
try {
snowflake.execute (
{sqlText: sql_command}
);
return "Succeeded."; // Return a success/error indicator.
}
catch (err) {
return "Failed: " + err; // Return a success/error indicator.
}
$$
;
//Calling sp
call etl.usp_NightlyJobResetDays('Yes',30);
something like this should work:
CREATE OR REPLACE PROCEDURE ETL.usp_NightlyJobResetDays (NIGHTLYLOAD VARCHAR(10), RESETDAYS VARCHAR(10))
RETURNS VARCHAR
LANGUAGE JAVASCRIPT
EXECUTE AS CALLER
AS
$$
try {
    //Capturing date based on value
    var sql_command1 = `SELECT TO_CHAR((DATEADD(day,-`+RESETDAYS+`,DATEADD(day,DATEDIFF(day, '1900-01-01'::date, CURRENT_TIMESTAMP::date),'1900-01-01'::date))))`;
    var ResetDaysDateRange_res = snowflake.execute({sqlText: sql_command1});
    ResetDaysDateRange_res.next();
    var ResetDaysDateRange = ResetDaysDateRange_res.getColumnValue(1);
    //checking load type
    if (NIGHTLYLOAD == `Yes`)
    {
        var sql_command2 = `Update Reporting.ReportingLoadDetail
                            SET MaxLoadDate = TO_DATE('`+ResetDaysDateRange+`')
                            WHERE IsNightlyLoadImpacted <> 'Yes'`;
        snowflake.execute({sqlText: sql_command2});
        var sql_command3 = `UPDATE Reporting.ReportingLoadDetail
                            SET MaxLoadDate = CASE WHEN TO_DATE('`+ResetDaysDateRange+`') > LastInitialLoadDate THEN TO_DATE('`+ResetDaysDateRange+`')
                                                   ELSE LastInitialLoadDate
                                              END
                            WHERE IsNightlyLoadImpacted = 'Yes'`;
        snowflake.execute({sqlText: sql_command3});
    }
    //reset restartabilityStatus to completed if last incremental load got failed
    var sql_command4 = `UPDATE etl.APILastLoadDetail set RestartabilityStatus = 'Completed'`;
    snowflake.execute({sqlText: sql_command4});
    return "Success";
}
catch (err) {
    return "Failed: " + err; // Return a success/error indicator.
}
$$
;
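As a side note, rather than concatenating RESETDAYS into the SQL text, the JavaScript stored procedure API also accepts bind variables (the ? placeholders the original attempt was aiming for), which avoids quoting problems. A small sketch of the first statement rewritten with a bind, using the same date expression as sql_command1 above:
var stmt = snowflake.createStatement({
    sqlText: `SELECT TO_CHAR(DATEADD(day, ?, DATEADD(day, DATEDIFF(day, '1900-01-01'::date, CURRENT_TIMESTAMP::date), '1900-01-01'::date)))`,
    // RESETDAYS arrives as VARCHAR, so negate it in JavaScript before binding.
    binds: [-1 * RESETDAYS]
});
var rs = stmt.execute();
rs.next();
var ResetDaysDateRange = rs.getColumnValue(1);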
Best regards,
TK

EF Core 3.1 - Database scalar function - string.Join

Since string.Join(...) is not translatable in the latest EF Core and a lot of our queries use that method, I'm currently trying to implement it with user-defined functions. I can see that the function actually works in SSMS, but I can't get it to work in C#.
The method needs to accept an array of strings as input, so I created a user-defined table type:
IF TYPE_ID('[system].[stringList]') IS NULL
BEGIN
CREATE TYPE system.stringList AS TABLE (val nvarchar(max))
END
GO
Then I created the function itself:
create function toCommaSeparatedString(@strings [system].[stringList] readonly)
returns nvarchar(max)
as
begin
    DECLARE @values nvarchar(max)
    SELECT @values = COALESCE(@values + ', ', '') + [val]
    FROM @strings
    return @values
end
I can verify that it works by simply executing the following SQL in SSMS:
declare @input [system].[stringList]
insert into @input values ('1'), ('2'), ('3'), ('4')
select dbo.toCommaSeparatedString(@input)
In C#, I declared the function on my DbContext (I tried both string[] and IEnumerable<string>):
[DbFunction("toCommaSeparatedString", "dbo")]
public static string ToCommaSeparatedString(string[] strings) => throw new NotImplementedException();
And used it:
...
var output = MyDbContext.ToCommaSeparatedString(new [] { "1", "2" });
...
but I'm getting an exception:
"The parameter 'strings' for the DbFunction 'BusinessUnitDbContext.ToCommaSeparatedString' has an invalid type 'string[]'. Ensure the parameter type can be mapped by the current provider.",
Is it possible to achieve what I'm trying to do here? Do I also need to configure that function on the DbContext?
EDIT:
Here is the custom projection that I want to use to create the view model:
private static IQueryable<IView> ProjectToJobOverView(this IQueryable<Job> entities)
{
    return entities.Select(j => new JobOverView
    {
        Id = j.EntityId,
        Deleted = j.Deleted.HasValue,
        Name = j.Name,
        Status = j.Status.ToString(),
        NumberOfInstructionSets = j.Tasks.Count(t => t.InstructionSet != null),
        NumberOfCompletedInstructionSets = j.Tasks.Count(t => t.InstructionSet.IsCompleted),
        NumberOfOrderLines = j.Tasks
            .SelectMany(t => t.TaskOrderLines).Select(x => x.OrderLineId)
            .Distinct()
            .Count(),
        Details = j.Tasks
            .OfType<StockMovementTask>()
            .Where(t => t.InstructionSet != null)
            .Select(t => t.InstructionSet)
            .Select(s => new JobOverviewDetail
            {
                Id = s.EntityId,
                Deleted = s.Deleted.HasValue,
                State = s.Tasks.First().OperationState.ToString(),
                DeliveryMethod = BusinessUnitDbContext.ToCommaSeparatedString(
                    s.Tasks
                        .First()
                        .TaskOrderLines.Where(x => x.OrderLine.Order is OutgoingOrder && (x.OrderLine.Order as OutgoingOrder).DeliveryType != null)
                        .Select(x => (x.OrderLine.Order as OutgoingOrder).DeliveryType.Name).ToArray()),
            }),
        Categories = j.Tasks.Select(t => t.Process.ProcessDefinition.ProcessDefinitionGroup.ProcessDefinitionCategory.ToString()).Distinct().ToArray(),
        OrderNumbers = j.Tasks.OfType<StockMovementTask>().SelectMany(t => t.TaskOrderLines).Select(tol => tol.OrderLine.Order.Number).Distinct().ToArray(),
    });
}

How to execute a stored procedure from Node.js

I have this router in Node.js and I want to execute a stored procedure:
router.post('/upadateCreateSiteMonth', isAuthed, function(req, res) {
    //console.log('REQ USER', req.user);
    var q = 'USE [EYE]' +
        'GO' +
        'DECLARE @return_value int' +
        'EXEC @return_value = [dbo].[uspCreateSiteMonthConsumption]' +
        'SELECT \'Return Value\' = @return_value' +
        'GO'
    exp.emitter.emit('mssql', { q: q }, function(err, data) {
        if (err) {
            res.status(500).json({ error: err });
        } else {
            res.status(200).json(data);
        }
    });
});
When I run this directly against the SQL Server database, it is OK:
DECLARE @return_value INT
EXEC @return_value = [dbo].[uspCreateSiteMonthConsumption]
SELECT 'Return Value' = @return_value
GO
How can I write the variable q so that this router responds with 200?
A simple query like Select * from table works fine, but when I try to use this query to run the stored procedure, I get a 500 response.
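For reference, here is a minimal sketch of calling the procedure with the mssql package's request.execute directly; GO is an SSMS/sqlcmd batch separator rather than T-SQL, so drivers cannot execute it, and request.execute hands back the return value without it. This bypasses the custom exp.emitter wrapper from the question and assumes an already-connected ConnectionPool named pool:
const sql = require('mssql');

router.post('/upadateCreateSiteMonth', isAuthed, function (req, res) {
    // pool is assumed to be an already-connected sql.ConnectionPool
    const request = new sql.Request(pool);
    request.execute('dbo.uspCreateSiteMonthConsumption', function (err, result) {
        if (err) {
            res.status(500).json({ error: err });
        } else {
            // result.returnValue holds the procedure's integer return value
            res.status(200).json({ returnValue: result.returnValue, recordset: result.recordset });
        }
    });
});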

How to insert multiple records in a transaction

I have a question about SQL Server transactions.
I want to insert data into Table_A and Table_B.
Table_B has a foreign key to Table_A.
The number of Table_B records (which reference Table_A's key) is dynamic.
[Table_A]
id: ,
title:
[Table_B]
id: ,
tableA_id:,
title:
My code only works when the inserted data has a static size, like this:
var tableBtitles = ['abc', 'def', 'ghi'] // this size is dynamic
const transaction = new sql.Transaction()
transaction.begin(err => {
    const request = new sql.Request(transaction)
    request.query("insert into tableA (title) output Inserted.Id values('a')", (err, result) => {
        const request = new sql.Request(transaction)
        request.input('tableA_id', mssql.NVarChar, result['recordset'][0]['id'])
        request.input('title1', mssql.NVarChar, tableBtitles[0])
        request.input('title2', mssql.NVarChar, tableBtitles[1])
        request.input('title3', mssql.NVarChar, tableBtitles[2])
        request.query('insert into tableB (tableA_id, title) values (@tableA_id, @title1), (@tableA_id, @title2), (@tableA_id, @title3)', (err, result) => {
            transaction.commit(err => {
            })
        })
    })
})
Please tell me how to do it.
Try this syntax:
insert into [Table_A] ([data])
output inserted.id, inserted.data into [Table_B]
--check here
https://stackoverflow.com/a/3712735/8543453
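Since the number of titles is dynamic, another option is to build the parameter list for the second INSERT in a loop so it matches however many rows you have. A rough sketch, reusing the sql/mssql objects, transaction and tableBtitles from the question, and assuming tableAId holds the id returned by the first insert:
const request = new sql.Request(transaction)
request.input('tableA_id', mssql.NVarChar, tableAId)

// One named parameter per title, e.g. (@tableA_id, @title0), (@tableA_id, @title1), ...
const valueClauses = tableBtitles.map((title, i) => {
    request.input('title' + i, mssql.NVarChar, title)
    return '(@tableA_id, @title' + i + ')'
})

request.query(
    'insert into tableB (tableA_id, title) values ' + valueClauses.join(', '),
    (err, result) => {
        transaction.commit(commitErr => {
            // handle err / commitErr here
        })
    }
)
Keep in mind SQL Server limits a single request to roughly 2100 parameters, so very large arrays would need to be chunked (or bulk-loaded instead).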

What is the node.js equivalent of this T-SQL query

The legacy system used to store passwords in the format produced by this query,
SELECT
    HASHBYTES('SHA1', CONVERT(VARCHAR, HASHBYTES('SHA1', CONVERT(NVARCHAR(4000), 'test'))) + 'mysalt')
where the password is test and mysalt is the salt used.
The result is something like
0x169A0EF01AA369518D6810E14872A3A003A1F0AA
I have to take that hashed password and create a Node function that produces the same result as the above query.
Node.js is not going to replace a T-SQL query. You would still use T-SQL to query your database, with something like the tedious module to connect to it. The example below, from https://msdn.microsoft.com/library/mt715784.aspx, shows how to connect to SQL Server from Node.js and execute a query. Some modifications to the executeStatement function would get you going.
var Connection = require('tedious').Connection;
var config = {
    userName: 'yourusername',
    password: 'yourpassword',
    server: 'yourserver.database.windows.net',
    // When you connect to Azure SQL Database, you need these next options.
    options: { encrypt: true, database: 'AdventureWorks' }
};
var connection = new Connection(config);
connection.on('connect', function(err) {
    // If no error, then good to proceed.
    console.log("Connected");
    executeStatement();
});
var Request = require('tedious').Request;
var TYPES = require('tedious').TYPES;
function executeStatement() {
    request = new Request("SELECT c.CustomerID, c.CompanyName, COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;", function(err) {
        if (err) {
            console.log(err);
        }
    });
    var result = "";
    request.on('row', function(columns) {
        columns.forEach(function(column) {
            if (column.value === null) {
                console.log('NULL');
            } else {
                result += column.value + " ";
            }
        });
        console.log(result);
        result = "";
    });
    request.on('done', function(rowCount, more) {
        console.log(rowCount + ' rows returned');
    });
    connection.execSql(request);
}
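For instance, a rough sketch of such a modification: let SQL Server compute the legacy hash for a supplied password by sending the same HASHBYTES expression as a parameterized query (the hashPassword function name and @password parameter are my own; the salt is hard-coded as in the question, and connection/Request/TYPES are the same objects set up above):
function hashPassword(password) {
    // Run the legacy HASHBYTES expression with the password passed as a parameter.
    var request = new Request(
        "SELECT HASHBYTES('SHA1', CONVERT(VARCHAR, HASHBYTES('SHA1', CONVERT(NVARCHAR(4000), @password))) + 'mysalt') AS hashed;",
        function(err) {
            if (err) {
                console.log(err);
            }
        });
    request.addParameter('password', TYPES.NVarChar, password);
    request.on('row', function(columns) {
        // columns[0].value is a Buffer containing the 20-byte SHA-1 digest.
        console.log('0x' + columns[0].value.toString('hex').toUpperCase());
    });
    connection.execSql(request);
}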
