Node-SQL with Node.js and MS integrated Security

I have some sample code that successfully connects to SQL Server using a Microsoft SQL Server user name and password. But I was wondering if there is a way to use integrated security with this script, which basically means using the logged-in user's credentials without supplying a password in the script.
var sql = require('mssql');

var config = {
    server: '127.0.0.1',
    database: 'master',
    user: 'xx',
    password: 'xxx',
    options: {
        trustedConnection: true
    }
};

var connection = new sql.Connection(config, function(err) {
    // ... error checks
    if (err) {
        return console.log("Could not connect to sql: ", err);
    }
    // Query
    var request = new sql.Request(connection);
    request.query('select * from dbo.spt_monitor (nolock)', function(err, recordset) {
        // ... error checks
        console.dir(recordset);
    });
    // Stored Procedure
});

Wish I could add this as a comment but I don't have enough reputation yet... what happens when you run this without providing a username/password in the config object?
Windows Authentication happens at the login level so there is no need to provide it at the application level.
Just browsed the documentation and it looks like you cannot provide a raw connection string; to connect you want to build something that looks like this:
var connectionString = 'Server=MyServer;Database=MyDb;Trusted_Connection=Yes;';
The source code of the mssql module is here: https://github.com/patriksimek/node-mssql/blob/master/src/msnodesql.coffee. Maybe you can fork it and submit a pull request that adds an optional flag for Windows Authentication; that flag would remove Uid={#{user}};Pwd={#{password}} (unneeded for Windows Authentication) from the CONNECTION_STRING_PORT variable in the module's source code.
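For anyone trying this today, here is a minimal sketch of that idea against the question's own code, assuming the msnodesqlv8 driver is installed (npm install msnodesqlv8) and that your mssql version accepts a driver config option; tedious, the default driver, does not do integrated security. Newer versions of the package expose the same driver as require('mssql/msnodesqlv8'). Treat this as a starting point, not a verified recipe:

// Sketch only: switch mssql from tedious to the native msnodesqlv8 driver,
// which can use the Windows credentials of the running process.
var sql = require('mssql');

var config = {
    driver: 'msnodesqlv8',        // assumption: this mssql version accepts a driver option
    server: '127.0.0.1',
    database: 'master',
    options: {
        trustedConnection: true   // no user/password: log in as the current Windows user
    }
};

var connection = new sql.Connection(config, function(err) {
    if (err) {
        return console.log("Could not connect to sql: ", err);
    }
    new sql.Request(connection)
        .query('select * from dbo.spt_monitor (nolock)', function(err, recordset) {
            if (err) {
                return console.log("Query failed: ", err);
            }
            console.dir(recordset);
        });
});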


Sending Email alerts from Snowflake

Is there any way we can send email alerts if a stored procedure fails in Snowflake?
When I checked the Snowflake documentation, there was no mention of an email utility in Snowflake.
You can send email directly from Snowflake, optionally sending data from a table/view as an attachment. This is done using a Snowflake external function, which in turn calls an AWS Lambda function via AWS API Gateway.
The first step is to set up the AWS API Gateway. You may follow the instructions below:
Creating a Customizable External Function on AWS
If you got the sample function working from Snowflake, you have successfully set up the foundation for adding email functionality. Next is to set up an S3 bucket to create data files that need to be sent as email attachments.
Create an AWS S3 bucket with the name 'snowapi'. We need not expose this bucket to the internet, so keep 'Block all public access' set to ON.
Now you need to provide Snowflake access to this bucket. Create an IAM user 'snowflake'. Add Permissions -> Attach existing policy: AmazonS3FullAccess. Go to the 'Security Credentials' tab and 'Create access key'. Use the Access Key ID and Secret Access Key in the command below to unload data into the S3 bucket.
CREATE OR REPLACE STAGE UTIL.AWS_S3_STAGE URL='s3://snowapi/'
    CREDENTIALS=(AWS_KEY_ID='ABCD123456789123456789'
                 AWS_SECRET_KEY='ABCD12345678901234567890123456789');

COPY INTO @UTIL.AWS_S3_STAGE/outbound/SampleData.csv
    FROM <table_or_query>
    FILE_FORMAT = <file_format_spec>
    OVERWRITE = TRUE
    SINGLE = TRUE;
The next step is to create a new Lambda function using the Node.js code below. Note that this uses the SendGrid API; SendGrid has a forever-free tier with 100 emails per day. I installed the library locally and uploaded the zip file to AWS to create the Lambda function.
//Lambda Function name: email
const sgMail = require('@sendgrid/mail');
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

exports.handler = async (event, context, callback) => {
    sgMail.setApiKey(process.env.SENDGRID_KEY);
    const paramArray = JSON.parse(event.body).data[0];
    //paramArray[0] has the row number from Snowflake
    var message = {
        to: paramArray[1].replace(/\s/g, '').split(','),
        from: paramArray[2].replace(/\s/g, ''),
        subject: paramArray[3],
        html: paramArray[4]
    };
    // Attach file
    if (paramArray.length > 5) {
        var fileName = paramArray[5].substring(paramArray[5].lastIndexOf("/") + 1);
        var filePath = paramArray[5].substring(0, paramArray[5].lastIndexOf("/"));
        var fileContent;
        try {
            const params = {Bucket: process.env.BUCKET_NAME + filePath, Key: fileName};
            const data = await s3.getObject(params).promise();
            fileContent = data.Body.toString('base64');
        } catch (e) {
            throw new Error(`Could not retrieve file from S3: ${e.message}`);
        }
        message.attachments = [{
            content: fileContent,
            filename: fileName,
            type: "application/text",
            disposition: "attachment"
        }];
    }
    try {
        await sgMail.send(message);
        return {
            'statusCode': 200,
            'headers': { 'Content-Type': 'application/json' },
            'body': "{'data': [[0, 'Email Sent to " + paramArray[1] + "']]}"
        };
    } catch (e) {
        return {
            'statusCode': 202,
            'headers': { 'Content-Type': 'application/json' },
            'body': "{'data': [[0, 'Error - " + e.message + "']]}"
        };
    }
};
Set the below two environment variables for the Lambda function:
SENDGRID_KEY: <sendgrid_api_key>
BUCKET_NAME: snowapi
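Before wiring the function to API Gateway, you can sanity-check it locally. Here is a minimal sketch, assuming the handler above is saved as index.js and SENDGRID_KEY is set in your shell; the payload mimics the JSON shape a Snowflake external function posts:

// Hypothetical local smoke test for the Lambda handler above.
const { handler } = require('./index');

const event = {
    body: JSON.stringify({
        // [row number, to, from, subject, html] -- the shape the handler parses
        data: [[0, 'person1@example.com', 'no-reply@yourdomain.com',
                'Test Subject', '<p>Hello from Snowflake</p>']]
    })
};

handler(event).then(res => console.log(res));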
Create a Snowflake External Function:
create or replace external function util.aws_email
    (mailTo varchar, mailFrom varchar, subject varchar, htmlBody varchar, fileName varchar)
    returns variant
    api_integration = aws_api_integration
    as 'https://xxxxxxxxxx.execute-api.us-east-1.amazonaws.com/PROD/email';
Create a wrapper Procedure for the above external function:
create or replace procedure util.sendemail
    (MAILTO varchar, MAILFROM varchar, SUBJECT varchar, HTMLBODY varchar, FILENAME varchar)
    returns string
    language javascript
    EXECUTE AS OWNER
as
$$
    // Call the AWS Lambda function.
    var qry = "select util.aws_email(:1,:2,:3,:4,:5)";
    // null should be in lowercase.
    var stmt = snowflake.createStatement({
        sqlText: qry,
        binds: [MAILTO,
                MAILFROM || 'no-reply@yourdomain.com',
                SUBJECT || 'Email sent from Snowflake',
                HTMLBODY || '<p>Hi there,</p> <p>Good luck!</p>',
                FILENAME || null]
    });
    var rs;
    try {
        rs = stmt.execute();
        rs.next();
        return rs.getColumnValue(1);
    } catch (err) {
        throw "ERROR: " + err.message.replace(/\n/g, " ");
    }
$$;
All set! The end result is a clean call that sends email, like below (pass null for the attachment parameter when there is no file).
Call SENDEMAIL('to_email@dummy.com, to_another_email@dummy.com',
               'from@yourdomain.com',
               'Test Subject',
               'Sample Body',
               null);
Good Luck!!
I believe there is no email utility in Snowflake, but you can run your Snowflake stored procedure using Python, check the stored procedure's status, and trigger mail from Python based on that status.
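That could look something like the sketch below. It uses Node rather than Python, to match the rest of this thread, and assumes the snowflake-sdk and nodemailer packages; the account, procedure name, and SMTP relay are all placeholders:

// Sketch: run the procedure, and mail an alert only if it fails.
const snowflake = require('snowflake-sdk');
const nodemailer = require('nodemailer');

const connection = snowflake.createConnection({
    account: 'your_account',      // placeholder
    username: 'your_user',        // placeholder
    password: 'your_password'     // placeholder
});

connection.connect(err => {
    if (err) throw err;
    connection.execute({
        sqlText: 'call my_stored_procedure()',   // hypothetical procedure
        complete: (err) => {
            if (!err) return;                    // success: no alert needed
            nodemailer
                .createTransport({ host: 'smtp.example.com', port: 587 }) // placeholder relay
                .sendMail({
                    from: 'alerts@example.com',
                    to: 'dba@example.com',
                    subject: 'Snowflake procedure failed',
                    text: err.message
                });
        }
    });
});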
Sending Email Notifications:
This feature uses the notification integration object, which is a Snowflake object that provides an interface between Snowflake and third-party services (e.g. cloud message queues, email, etc.). A single account can define a maximum of ten email integrations and enable one or more simultaneously.
To create an email notification integration, use the CREATE
NOTIFICATION INTEGRATION command with TYPE=EMAIL:
CREATE [ OR REPLACE ] NOTIFICATION INTEGRATION [IF NOT EXISTS]
    <integration_name>
    TYPE=EMAIL
    ENABLED={TRUE|FALSE}
    ALLOWED_RECIPIENTS=('<email_address_1>' [, ... '<email_address_N>'])
    [ COMMENT = '<string_literal>' ]
;
After creating the email notification integration, you can call SYSTEM$SEND_EMAIL() to send an email notification, as follows:
CALL SYSTEM$SEND_EMAIL(
    '<integration_name>',
    '<email_address_1> [, ... <email_address_N>]',
    '<email_subject>',
    '<email_content>'
);
...
For example:
CALL SYSTEM$SEND_EMAIL(
    'my_email_int',
    'person1@example.com, person2@example.com',
    'Email Alert: Task A has finished.',
    'Task A has successfully finished.\nStart Time: 10:10:32\nEnd Time: 12:15:45\nTotal Records Processed: 115678'
);
We use the snowsql command from bash scripts, with the "-o exit_on_error=true" option on the command line, checking the return code at the end of the step. If the Snowflake commands have failed, the exit-on-error setting means Snowflake will stop at the point of the error and return control to the calling program.
If the return code is zero, then we move onto the next step.
If it is non-zero, then we call an error handler which sends an email and then quits the job.
We're on Amazon Linux for our orchestration, and we use mutt as an email application.
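Here is the same pattern sketched in Node rather than bash, for consistency with the rest of this thread; it assumes snowsql is on the PATH and job.sql is a placeholder script name:

// Run a script with exit_on_error=true and branch on the exit code.
const { execFileSync } = require('child_process');

try {
    execFileSync('snowsql', ['-o', 'exit_on_error=true', '-f', 'job.sql'], {
        stdio: 'inherit'
    });
    console.log('Step succeeded, moving on to the next one');
} catch (e) {
    // Non-zero exit code: call your error handler / mailer here, then quit.
    console.error('snowsql failed with exit code ' + e.status);
    process.exit(e.status || 1);
}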

Accessing Dynamics CRM via username/password throwing AdalServiceException: AADSTS65001

I followed the quickstart here: https://learn.microsoft.com/en-us/powerapps/developer/common-data-service/webapi/enhanced-quick-start
That worked great, so then I needed to register my app, so I followed this:
https://learn.microsoft.com/en-us/powerapps/developer/common-data-service/walkthrough-register-app-azure-active-directory
But now my unit tests give me the error:
Microsoft.IdentityModel.Clients.ActiveDirectory.AdalServiceException:
AADSTS65001: The user or administrator has not consented to use the
application with ID '[GUID]' named '[AppName]'. Send an interactive
authorization request for this user and resource.
I feel like I understand the error: the administrator needs to consent. My program is doing some magic in the background and the user is not signing in; it is using a set username and password, so the user shouldn't have to consent to anything. Is there any way to set this consent permanently, or force it every time through the Helper class in the first tutorial? All my Google-fu came up empty... Thank you.
You can use something like this (CrmServiceClient is from the Microsoft.Xrm.Tooling.Connector NuGet package):
private CrmServiceClient GenerateService()
{
    ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12;
    ServicePointManager.Expect100Continue = true;
    ServicePointManager.CheckCertificateRevocationList = true;
    ServicePointManager.DefaultConnectionLimit = 10;

    var service = new CrmServiceClient(new Uri(organizationUrl), clientId, secret, false, string.Empty);
    if (service.IsReady == false)
    {
        throw new Exception("CrmOrgService isn't ready. " + service.LastCrmError);
    }
    return service;
}
Or if you want to use a connection string you can use this:
Connection strings: https://learn.microsoft.com/en-us/dynamics365/customerengagement/on-premises/developer/xrm-tooling/use-connection-strings-xrm-tooling-connect

var connectionString = ConfigurationManager.ConnectionStrings["XY"].ConnectionString;
var conn = new CrmServiceClient(connectionString);
IOrganizationService orgService = conn.OrganizationServiceProxy;
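For reference, a client-secret style connection string for CrmServiceClient typically looks like the line below; all values are placeholders, and the doc link above lists the full set of supported parameters:

AuthType=ClientSecret; Url=https://yourorg.crm.dynamics.com; ClientId=<app-id>; ClientSecret=<client-secret>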

Pick up connection if there is a disconnect

I make use of this specific version: https://github.com/patriksimek/node-mssql/tree/v3.3.0#multiple-connections of the SQL Server npm package.
I have been looking through the documentation of tedious (the underlying lib) and Microsoft's documentation (see the GitHub link above).
I couldn't find anything that does something simple like getCurrentConnection, or getConnectionStatus or anything similar.
I had two ways to solve this problem but I'm not happy with both of them so that's why I'm asking here.
My first approach was to set a timeout and let the connect function call itself on each catch(err).
The second was to handle this in the middleware, but then, even when everything is working fine, it will open a connection to SQL on every request and close that connection again.
My middleware function:
api.use(function(err, req, res, next) {
    sql.close();
    sql.connect(config.database).then(() => {
        next();
    }).catch(function(err) {
        sql.close();
        server.main();
    });
});
If possible, I want to pick the connection back up instead of closing it and starting a new one, so that when the server or the database crashes I still have some data from the existing function.
With the help of Arnold I got to understand the mssql package and its inner workings a lot better.
Therefore I came up with the following solution to my problem.
let intervalFunction;
const INTERVAL_DURATION = 4000;

if (require.main === module) {
    console.log("Listening on http://localhost:" + config.port + " ...");
    app.listen(config.port);
    // try to connect to db and fire main on success.
    intervalFunction = setInterval(() => getConnection(), INTERVAL_DURATION);
}

function getConnection() {
    sql.close();
    sql.connect(config.database).then(() => {
        sql.close();
        clearInterval(intervalFunction);
        main();
    }).catch(function(err) {
        console.error(err);
        console.log(`DB connection will be tried again in ${INTERVAL_DURATION}ms`);
        sql.close();
    });
}
Once the initial connection has been made, if it gets lost later the pool will pick the connection back up automatically and handle your connections.
If I understood you correctly, you basically want to reuse connections. Tedious has built-in connection pooling, so you don't have to worry about re-using them:
var config = {
    user: '...',
    password: '...',
    server: 'localhost',
    database: '...',
    pool: {
        max: 10,
        min: 0,
        idleTimeoutMillis: 30000
    }
}
In the example above (just copied from the GitHub URL you've posted), there will be up to 10 connections in the pool ready to use. Here's the beauty: the pool manager will handle all connection use and re-use for you, i.e., the number of connections is elastic based on your app's needs.
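A minimal usage sketch against the 3.x API from the link in the question: connect once to create the global pool, then just issue requests and let the pool handle acquire/release:

// Assumes the config object above; sql.connect creates the global pool once.
const sql = require('mssql');

sql.connect(config).then(() => {
    // Each Request borrows a pooled connection and releases it when done.
    return new sql.Request().query('select 1 as number');
}).then(recordset => {
    console.dir(recordset);
}).catch(err => {
    // Dead connections get replaced by the pool's built-in health check.
    console.error(err);
});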
As you've mentioned, what about DB crashes? That too is built-in: connection health-check:
Internally, each Connection instance is a separate pool of TDS
connections. Once you create a new Request/Transaction/Prepared
Statement, a new TDS connection is acquired from the pool and reserved
for desired action. Once the action is complete, connection is
released back to the pool. Connection health check is built-in so once
the dead connection is discovered, it is immediately replaced with a
new one.
I hope this helps!

OWIN Invalid URI: The Uri String is too long

I have an MVC application hosted on a server (IIS) which points to 3 SQL databases. This has been running without issues for months.
I've just had to change the connection strings for all 3 SQL databases to point to new databases.
Now when I try to log in I get the error from the title: Invalid URI: The Uri string is too long.
The connection strings are using Windows Authentication and this account is set in the AppPool. I've also manually tried to connect to each database instance with the account and this works fine. I'm beginning to think the change in SQL connections is just a red herring.
In terms of the error message, I totally understand what the error is; I'm just not sure why it's being thrown. The only thing I can think of is that I'm in some kind of redirect loop which is appending to the URL.
It definitely feels like an IIS issue but I can't put my finger on it.
Has anyone come across this before with OWIN or can advise on debugging steps that might diagnose the issue?
Startup.cs
public partial class Startup
{
    private static bool IsAjaxRequest(IOwinRequest request)
    {
        IReadableStringCollection query = request.Query;
        if ((query != null) && (query["X-Requested-With"] == "XMLHttpRequest"))
        {
            return true;
        }
        IHeaderDictionary headers = request.Headers;
        return ((headers != null) && (headers["X-Requested-With"] == "XMLHttpRequest"));
    }

    public void ConfigureAuth(IAppBuilder app)
    {
        // Configure the db context, user manager and role manager to use a single instance per request
        app.CreatePerOwinContext(ParentDbContext.Create);
        app.CreatePerOwinContext<ApplicationUserManager>(ApplicationUserManager.Create);
        app.CreatePerOwinContext<ApplicationRoleManager>(ApplicationRoleManager.Create);
        app.CreatePerOwinContext<ApplicationSignInManager>(ApplicationSignInManager.Create);
        app.CreatePerOwinContext(PrincipalManager.Create);

        // Enable the application to use a cookie to store information for the signed in user
        // and to use a cookie to temporarily store information about a user logging in with a third party login provider
        // Configure the sign in cookie
        app.UseCookieAuthentication(new CookieAuthenticationOptions
        {
            AuthenticationType = DefaultAuthenticationTypes.ApplicationCookie,
            LoginPath = new PathString("/Account/Login"),
            Provider = new CookieAuthenticationProvider
            {
                // Enables the application to validate the security stamp when the user logs in.
                // This is a security feature which is used when you change a password or add an external login to your account.
                OnValidateIdentity =
                    SecurityStampValidator.OnValidateIdentity<ApplicationUserManager, ApplicationUser, Guid>(
                        TimeSpan.FromMinutes(int.Parse(WebConfigurationManager.AppSettings["RefreshInterval"])),
                        (manager, user) => manager.GenerateUserIdentityAsync(user),
                        claim => new Guid(claim.GetUserId())),
                OnApplyRedirect = ctx =>
                {
                    if (!IsAjaxRequest(ctx.Request))
                    {
                        ctx.Response.Redirect(ctx.RedirectUri);
                    }
                }
            }
        });
    }
}
After hours of investigation I eventually found the issue.
The issue was the number of claims being added for a user. Once we reduced the number of claims it started working again.
The most likely cause is that you're stuck in an error loop. If the authentication against the database where the users are stored is failing, then you get sent to the error page, which tries to run the authentication again, fails, and sends you to the error page, again and again. Each pass appends to the previous URL, eventually reaching this state.

Create a Backbone.Model to manage a server connection

So basically, I'm building a test app where I need to enter server credentials in order to connect to an ElasticSearch server, and I want this connection to be available only for the duration of the session.
I thought this could be managed via a Backbone.Model but I'm not too sure how to handle it. Do I need a Collection as well? Do I store this in the session or localStorage?
Page 1
Form with protocol, host, port, username, password
Backbone.View.extend({
    ...
    events: {
        'submit #connection-form': 'connect'
    },
    ...
    connect: function() {
        console.log('Connecting...');
        protocol = $("#protocol").val();
        host = $("#host").val();
        port = $("#port").val();
        user = $("#username").val();
        password = $("#password").val();
        App.connection = new Connection({ protocol: protocol, host: host, port: port, user: user, password: password });
        self = this;
        App.connection.connect(function(response) {
            if (response.status == 200) {
                App.connection.set('name', response.name);
                App.router.navigate('dashboard', {trigger: true});
            } else {
                $(self.el).prepend(_.template(noticeTpl, { type: 'danger', message: "Could not connect to the server." }));
            }
        });
        return false;
    }
});
Page 2
List of indexes on my ElasticSearch server
I need to store what was submitted on page one across the whole session in order to be able to query the server at any time.
No reason you'll need a collection since you're only dealing with a single object. I also don't see any reason to use localStorage unless you're storing a lot of data, which you aren't, or want the app to be usable offline, which doesn't make any sense here.
Using sessionStorage is pretty straightforward:
sessionStorage.setItem('user', JSON.stringify(user));
var obj = JSON.parse(sessionStorage.getItem('user'));
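A sketch of wiring that into the question's code, where Connection and App.connection are the names from page 1:

// In the connect callback, once response.status == 200, save the attributes:
sessionStorage.setItem('connection', JSON.stringify(App.connection.toJSON()));

// On page 2 (or at app start), rehydrate the model if it was saved:
var saved = sessionStorage.getItem('connection');
if (saved) {
    App.connection = new Connection(JSON.parse(saved));
}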
