Using a tedious connection, need to get the total data - reactjs

Hi, I am new to tedious and ES6. It may be a silly question, but I am struggling: I want to get all of the data into an array using a tedious connection. Here is my code:
getZipData() {
  var Connection = require('tedious').Connection;
  Request = require('tedious').Request;
  var config = {
    userName: 'xx',
    password: 'xxxx',
    server: 'xxx', // You can use 'localhost\\instance' to connect to a named instance
    options: {
      database: 'xxxxx',
      rowCollectionOnDone: 'true'
    }
  };
  var connection = new Connection(config);
  var jsonArray = [];
  connection.on('connect', function (err) {
    if (err) {
      console.log(err);
    }
    var sql = "SELECT * FROM xxxxx";
    return new Promise(function (resolve, reject) {
      var request = new Request(sql, (err, rowCount, rows) => {
        if (err) {
          reject(err);
        } else {
          alert("rows");
          console.log(rowCount + ' rows');
        }
      });
      request.on('row', (columns) => {
        var rowObject = {};
        columns.forEach((column) => {
          rowObject[column.metadata.colName] = column.value;
        });
        jsonArray.push(rowObject);
      });
      connection.execSql(request);
      request.on('done', function (rowCount, more) {
        console.log(rowCount + ' rows returned');
        alert("jsonArray2:" + jsonArray);
        resolve(jsonArray);
      });
    });
  });
}

componentWillMount() {
  this.getZipData().then(function (resolved) {
    console.log(resolved);
    alert("data:" + resolved);
  }).catch(function (rejected) {
    console.log(rejected);
  });
}
Even after adding request.on('done', function(rowCount, more) { ... }) I still don't get any data. Can anyone give me a solution? I want all of the data to be displayed.

It looks like you're calling resolve before your query has been executed:
var jsonArray = [];

// Register callback for row event
request.on('row', (columns) => {
  var rowObject = {};
  columns.forEach((column) => {
    rowObject[column.metadata.colName] = column.value;
  });
  jsonArray.push(rowObject);
});

// Call resolve before executing request
resolve(jsonArray);
connection.execSql(request);
The docs mention a done event that indicates a request has completed:
request.on('done', function (rowCount, more, rows) {
  // Call resolve here instead?
  resolve(jsonArray);
});
Disclaimer: I haven't actually used Tedious, but from the linked docs this looks like what you're looking for.
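For reference, here is a minimal sketch (not from the original answer) of how getZipData could be restructured so that it actually returns the Promise and only resolves once the request has finished. It relies on the completion callback passed to new Request, which tedious invokes after all 'row' events have fired; the table name and config values are the question's placeholders, and newer tedious versions may also require an explicit connection.connect() call:

getZipData() {
  var Connection = require('tedious').Connection;
  var Request = require('tedious').Request;
  var config = { /* same config as in the question */ };

  return new Promise(function (resolve, reject) {
    var connection = new Connection(config);
    var jsonArray = [];

    connection.on('connect', function (err) {
      if (err) {
        return reject(err);
      }
      var request = new Request("SELECT * FROM xxxxx", function (err, rowCount) {
        // Completion callback: runs after every 'row' event has fired.
        if (err) {
          return reject(err);
        }
        resolve(jsonArray);
      });
      request.on('row', function (columns) {
        var rowObject = {};
        columns.forEach(function (column) {
          rowObject[column.metadata.colName] = column.value;
        });
        jsonArray.push(rowObject);
      });
      connection.execSql(request);
    });
  });
}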

Related

throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or Buffer

I am trying to get the contents of a .json file from a Node.js service into an AngularJS method, but I am getting the following error:
_http_outgoing.js:700
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type object
at ServerResponse.end (_http_outgoing.js:700:13)
Here are the corresponding code fragments.
Angular controller (the commented-out lines are all the variations I have tried and that failed):
var currentProcess = "process_1cA";
$scope.storestats = [];
var resAss = $resource('/procs/getstorestats');
var stats = resAss.get({
  process: currentProcess,
  date: date.getFullYear() + "" + m + "" + d
});
stats.$promise.then(function(response) {
  if (response != undefined) {
    // var r = JSON.parse(response);
    //$scope.storestats.push(r);
    //$scope.storestats.push(r);
    //var r = JSON.parse(response);
    $scope.storestats.push(response);
    //angular.forEach(r, function(value, key) {
    //  $scope.storestats.push({key : value});
    //});
  }
});
Node.js service:
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      //var r = data.toString('utf8');
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs1);
      res.end(msgs1);
    });
  }
  catch (err) {
    res.end(err.toString());
  }
});
P.S.: The commented-out lines are the ones I have already tried without success. The commented lines in the Node service snippet give no error and log the data correctly, but the data in the response that reaches the controller is blank.
I'm guessing a bit here, but I think you just need to change res.end() to res.send() in your Node code. The "end" method is used when you are streaming chunks of data and then you call end() when you're all done. The "send" method is for sending a response in one go and letting Node handle the streaming.
Also, be sure you are sending a string back!
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      //var r = data.toString('utf8');
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs1);
      res.send(msgs2); // NOTE THE CHANGE to `msgs2` (the string version)
    });
  }
  catch (err) {
    res.send(err.toString()); // NOTE THE CHANGE
  }
});
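As a side note (not from the original answers): since the file already contains JSON, an Express app can also use res.json(), which serializes the object and sets the Content-Type header in one step. A minimal sketch, assuming the same route, fs, and cfg values as above:

httpApp.get('/procs/getstorestats', function (req, res, next) {
  fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
    if (err) {
      return res.status(500).send(err.toString());
    }
    // res.json() stringifies the parsed object and sets Content-Type: application/json.
    res.json(JSON.parse(data));
  });
});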
I had a similar error. It was because I was passing process.pid to res.end(). It worked when I changed process.pid to a string:
res.end(process.pid.toString());
Figured it out. Two small changes were needed: one in the controller, to use $resource.query instead of $resource.get, and one in the service, where, as @jakarella said, I had to pass the stringified version to .end().
Controller:
var resAss = $resource('/procs/getstorestats');
var stats = resAss.query({process: currentProcess, date: date.getFullYear() + "" + m + "" + d});
stats.$promise.then(function (response) {
  $scope.storestats.push(response);
});
Node Service:
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs2);
      res.end(msgs2);
    });
  }
  catch (err) {
    res.end(err.toString());
  }
});
If you are using the 'request-promise' library, set json: true in the options:
var rp = require('request-promise');

var options = {
  uri: 'https://api.github.com/user/repos',
  qs: {
    access_token: 'xxxxx xxxxx'
  },
  headers: {
    'User-Agent': 'Request-Promise'
  },
  json: true // Automatically parses the JSON string in the response
};

rp(options)
  .then(function (repos) {
  })
  .catch(function (err) {
  });
Thank you user6184932, it works:
try {
  await insertNewDocument(fileNameDB, taskId);
  res.end(process.pid.toString());
} catch (error) {
  console.log("error occurred", error);
  res.send({
    "code": 400,
    "failed": "error occurred"
  });
}
In mysql2, the reason for this error can be the sql value itself: sql must be a query string, e.g.
const sql = 'SELECT * FROM tableName';

pool.executeQuery({
  sql,
  name: 'Error list for given SRC ID',
  values: [],
  errorMsg: 'Error occurred on fetching '
})
  .then(data => {
    res.status(200).json({ data });
  })
  .catch(err => {
    console.log('\n \n == db, icorp fetching error ====> : ', err.message, '\n \n');
  });
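For comparison (not from the original answer), the stock mysql2 promise API takes the query string directly; a minimal sketch, assuming a pool created with mysql2/promise and a hypothetical tableName:

const mysql = require('mysql2/promise');

const pool = mysql.createPool({ host: 'localhost', user: 'user', password: 'password', database: 'db' });

pool.query('SELECT * FROM tableName')
  .then(([rows, fields]) => {
    // rows is an array of result objects.
    console.log(rows);
  })
  .catch(err => {
    console.error(err.message);
  });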
I got the error using Node v12 (12.14.1).
Uncaught TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type number
Sample code for context.
const { Readable } = require('stream')
Readable.from(Buffer.from(base64content, 'base64'))
  .pipe( ... )
The solution (for my case) was upgrading to Node v14 (14.17.3), e.g.
nvm use 14

Can't compare MongoDB data with a JavaScript array

I want to compare the data I got from MongoDB with a JavaScript array. I am using lodash to compare, but it always returns an incorrect result.
var editUser = function(userData, getOutFunction) {
  var status = CONSTANTS.NG;
  checkExistUser(userData._id).then(function(user) {
    if (user !== null) {
      var userGroup = JSON.stringify(user.group);
      user.group = user.group.map((groupId) => {
        return groupId.toString();
      });
      var removedGroups = _.difference(userGroup, userData.group);
      var addedGroups = _.difference(userData.group, userGroup);
      console.log('Removed Groups: ', removedGroups);
      console.log('Added Groups: ', addedGroups);
    } else {
      status = CONSTANTS.NG;
      logger.debug(DEBUG_CLASS_NAME, "Cannot find object");
      if (typeof(getOutFunction) !== 'undefined') {
        getOutFunction(status, null);
      } else {
        NO_CALLBACK();
      }
    }
  }).catch(function() {
    console.log('Promise is error');
  });
};

var checkExistUser = function(userId) {
  return new Promise(function(resolve, reject) {
    UserDAO.findById(userId, function(err, user) {
      if (err) {
        logger.debug(DEBUG_CLASS_NAME, {
          name: err.name,
          code: err.code,
          message: err.message,
          method: "checkExist"
        });
        resolve(null);
      } else {
        resolve(user);
      }
    });
  });
};
For example, when I pass these values to the lodash difference function:
var user.group = ["58b8da67d585113517fed34e","58b8da6ed585113517fed34f"];
var userData.group = [ '58b8da67d585113517fed34e' ];
I want lodash's difference to return this result:
Removed Groups: ['58b8da6ed585113517fed34f']
Added Groups: []
However, the function gave me this result:
Removed Groups: []
Added Groups: [ '58b8da67d585113517fed34e' ]
Can anyone help me with this case? I would appreciate it.
I have had this issue as well; the result from MongoDB is an ObjectId, so you can compare someObjectId.toString() with your array of strings, or you can use
someObjectId.equals(stringOrObjectIdValue)
However, if you want to keep using lodash functions, you will have to convert both arrays either to strings or to ObjectIds before passing them into the function.
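A minimal sketch of that approach (not from the original answer), as it would look inside the .then(function(user) { ... }) handler from the question, converting both sides to plain strings before calling lodash:

var _ = require('lodash');

// user.group comes back from Mongo as ObjectIds; userData.group holds plain strings.
var existingGroups = user.group.map(function (groupId) {
  return groupId.toString();
});
var requestedGroups = userData.group.map(String);

var removedGroups = _.difference(existingGroups, requestedGroups);
var addedGroups = _.difference(requestedGroups, existingGroups);

console.log('Removed Groups:', removedGroups); // ['58b8da6ed585113517fed34f'] for the example data
console.log('Added Groups:', addedGroups);     // []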

Multiple Queries with Parse Cloud Code Using Promises

I have two questions:
Is the below example the right way to execute multiple Parse queries in a single Cloud Code function?
Is the below example going to provide all the data I'm querying with one HTTP request (when I call logbookEntries) and then count as two Parse requests on my account because it's two Parse queries?
Here's the code:
Parse.Cloud.define("logbookEntries", function(request, response) {
  //::: Query 1 :::
  var firstQuery = new Parse.Query("Logbook");
  var returnData = [];
  firstQuery.find().then(function(firstResults) {
    returnData[0] = firstResults;
  }).then(function(result) {
    //::: Query 2 :::
    var secondQuery = new Parse.Query("Logbook");
    secondQuery.find().then(function(secondResults))
    returnData[1] = secondResults;
  }).then(function(result) {
    response.success(returnData);
  }, function(error) {
    response.error(error);
  });
});
Thanks in advance.
1. It's one way, though not quite correct.
2. Yes.
Your code should really be:
Parse.Cloud.define("logbookEntries", function(request, response) {
  //::: Query 1 :::
  var firstQuery = new Parse.Query("Logbook");
  var returnData = [];
  firstQuery.find().then(function(firstResults) {
    returnData[0] = firstResults;
    var secondQuery = new Parse.Query("Logbook");
    return secondQuery.find();
  }).then(function(result) {
    returnData[1] = result;
    response.success(returnData);
  }, function(error) {
    response.error(error);
  });
});
Or, a better way to structure it would be:
Parse.Cloud.define("logbookEntries", function(request, response) {
  var firstQuery = new Parse.Query("Logbook");
  var secondQuery = new Parse.Query("Logbook");
  var promises = [];
  promises.push(firstQuery.find());
  promises.push(secondQuery.find());
  Parse.Promise.when(promises).then(function(result1, result2) {
    var returnData = [];
    returnData[0] = result1;
    returnData[1] = result2;
    response.success(returnData);
  }, function(error) {
    response.error(error);
  });
});
Just to update Wain's structured code: Promise.when returns an array when it is supplied with an array, so the correct code would be
Parse.Promise.when(promises).then(function([result1, result2]) {
and since there is no need to repack the array, it can simply be
Parse.Promise.when(promises).then(function(result) {
  response.success(result);
});
See here for more info.
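(Not part of the original answers) Since Parse promises are thenable, the same pattern can also be written with native Promise.all, assuming the same request/response style cloud function as in the question; a minimal sketch:

Parse.Cloud.define("logbookEntries", function(request, response) {
  var firstQuery = new Parse.Query("Logbook");
  var secondQuery = new Parse.Query("Logbook");

  Promise.all([firstQuery.find(), secondQuery.find()])
    .then(function(results) {
      // results[0] and results[1] hold the two result sets, in query order.
      response.success(results);
    })
    .catch(function(error) {
      response.error(error);
    });
});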

Adding to an array asynchronously in Node.js

I'm pretty new to this type of programming and I'm having some trouble populating an array from a nested call. I'm pretty sure this needs to be done using callbacks, but I'm having trouble wrapping my brain around it. Closures must also come into play here. I tried searching the web for a similar example but didn't find much.
Here is my original code. I tried a few different approaches but didn't pull it off.
TaskSchema.statics.formatAssignee = function(assignees) {
  var users = [];
  assignees.forEach(function(uid) {
    mongoose.model('User').findById(uid, function(err, user) {
      users.push({
        name: user.name.full,
        id: user.id
      });
    });
  });
  return users;
}
I really like the following pattern (recursion is the most elegant solution to async loops):
TaskSchema.statics.formatAssignee = function(assignees, callback) {
  var acc = [],
      uids = assignees.slice();

  (function next() {
    if (!uids.length) return callback(null, acc);
    var uid = uids.pop();
    mongoose.model('User').findById(uid, function(err, user) {
      if (err) return callback(err);
      acc.push({
        name: user.name.full,
        id: user.id
      });
      next();
    });
  })();
}
Check out async; it has an async forEach loop.
Edit
Here is the forEach method from the async library:
async.forEach = function (arr, iterator, callback) {
  if (!arr.length) {
    return callback();
  }
  var completed = 0;
  _forEach(arr, function (x) {
    iterator(x, function (err) {
      if (err) {
        callback(err);
        callback = function () {};
      }
      else {
        completed += 1;
        if (completed === arr.length) {
          callback();
        }
      }
    });
  });
};

var _forEach = function (arr, iterator) {
  if (arr.forEach) {
    return arr.forEach(iterator);
  }
  for (var i = 0; i < arr.length; i += 1) {
    iterator(arr[i], i, arr);
  }
};
You could do something like this:
Give formatAssignee a callback.
Count down how many users you still need to push onto users.
After you push the last one, invoke the callback with users as the argument (a sketch of this follows).
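A minimal sketch of that counting approach (not from the original answers; the names match the question's code and the error handling is simplified):

TaskSchema.statics.formatAssignee = function(assignees, callback) {
  var users = [];
  var remaining = assignees.length;

  if (remaining === 0) return callback(null, users);

  assignees.forEach(function(uid) {
    mongoose.model('User').findById(uid, function(err, user) {
      if (err) return callback(err);
      users.push({
        name: user.name.full,
        id: user.id
      });
      remaining -= 1;
      // Invoke the callback only once the last lookup has finished.
      if (remaining === 0) callback(null, users);
    });
  });
};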

Synchronous database queries with Node.js

I have a Node.js/Express app that queries a MySQL db within the route and displays the result to the user. My problem is how do I run the queries and block until both queries are done before redirecting the user to the page they requested?
In my example I have 2 queries that need to finish before I render the page. I can get the queries to run synchronously if I nest query 2 inside the 'result' callback of query 1. This, however, will become very convoluted when the number of queries increases.
How do I go about running multiple (in this case 2) database queries synchronously without nesting the subsequent query in the prior query's 'result' callback?
I've looked at the 'Flow control / Async goodies' in the Node modules and tried flow-js but I can't get it to work with the async queries.
Listed below are the 2 queries that I'm attempting to execute from the '/home' route. Can the Node experts explain the 'right' way to do this?
app.get('/home', function (req, res) {
  var user_array = [];
  var title_array = [];

  // first query
  var sql = 'select user_name from users';
  db.execute(sql)
    .addListener('row', function(r) {
      user_array.push( { user_name: r.user_name } );
    })
    .addListener('result', function(r) {
      req.session.user_array = user_array;
    });

  // second query
  var sql = 'select title from code_samples';
  db.execute(sql)
    .addListener('row', function(r) {
      title_array.push( { title: r.title } );
    })
    .addListener('result', function(r) {
      req.session.title_array = title_array;
    });

  // because the queries are async no data is returned to the user
  res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
});
The goal with node is not to care what order things happen in. This can complicate some scenarios. There is no shame in nesting callbacks. Once you are used to how it looks, you may find that you actually prefer that style. I do; it is very clear what order callbacks will fire. You can forgo the anonymous functions to make it less verbose if you have to.
If you are willing to restructure your code a bit, you can use the "typical" nested callback method. If you want to avoid callbacks, there are numerous async frameworks that will try and help you do this. One that you might want to check out is async.js (https://github.com/fjakobs/async.js). Example of each:
app.get('/home', function (req, res) {
  var lock = 2;
  var result = {};
  result.user_array = [];
  result.title_array = [];

  var finishRequest = function(result) {
    req.session.title_array = result.title_array;
    req.session.user_array = result.user_array;
    res.render('home.ejs', {layout: false, locals: { user_name: result.user_array, title: result.title_array }});
  };

  // first query
  var q1 = function(fn) {
    var sql = 'select user_name from users';
    db.execute(sql)
      .addListener('row', function(r) {
        result.user_array.push( { user_name: r.user_name } );
      })
      .addListener('result', function(r) {
        return fn && fn(null, result);
      });
  };

  // second query
  var q2 = function(fn) {
    var sql = 'select title from code_samples';
    db.execute(sql)
      .addListener('row', function(r) {
        result.title_array.push( { title: r.title } );
      })
      .addListener('result', function(r) {
        return fn && fn(null, result);
      });
  };

  // Standard nested callbacks
  q1(function (err, result) {
    if (err) { return; /* do something */ }
    q2(function (err, result) {
      if (err) { return; /* do something */ }
      finishRequest(result);
    });
  });

  // Using async.js
  async.list([
    q1,
    q2,
  ]).call().end(function(err, result) {
    finishRequest(result);
  });
});
For a one-off, I would probably just use a reference counting type approach. Simply keep track of how many queries you want to execute and render the response when they have all finished.
app.get('/home', function (req, res) {
  var lock = 2;
  var user_array = [];
  var title_array = [];

  var finishRequest = function() {
    res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
  };

  // first query
  var sql = 'select user_name from users';
  db.execute(sql)
    .addListener('row', function(r) {
      user_array.push( { user_name: r.user_name } );
    })
    .addListener('result', function(r) {
      req.session.user_array = user_array;
      lock -= 1;
      if (lock === 0) {
        finishRequest();
      }
    });

  // second query
  var sql = 'select title from code_samples';
  db.execute(sql)
    .addListener('row', function(r) {
      title_array.push( { title: r.title } );
    })
    .addListener('result', function(r) {
      req.session.title_array = title_array;
      lock -= 1;
      if (lock === 0) {
        finishRequest();
      }
    });
});
An even nicer approach would be to simply call finishRequest() in each 'result' callback and check for non-empty arrays before you render the response (a small sketch follows). Whether that will work in your case depends on your requirements.
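A minimal sketch of that variation (not from the original answer; it only works if both queries are guaranteed to return at least one row):

var finishRequest = function() {
  // Render only once both result sets have arrived.
  if (user_array.length > 0 && title_array.length > 0) {
    res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
  }
};
// Call finishRequest() from each 'result' callback instead of counting down a lock.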
Here's a really easy trick to handle multiple callbacks.
var after = function _after(count, f) {
  var c = 0, results = [];
  return function _callback() {
    switch (arguments.length) {
      case 0: results.push(null); break;
      case 1: results.push(arguments[0]); break;
      default: results.push(Array.prototype.slice.call(arguments)); break;
    }
    if (++c === count) {
      f.apply(this, results);
    }
  };
};
Example usage:

var handleDatabase = after(2, function (res1, res2) {
  res.render('home.ejs', { locals: { r1: res1, r2: res2 } });
});

db.execute(sql1).on('result', handleDatabase);
db.execute(sql2).on('result', handleDatabase);
So basically you need reference counting. This is the standard approach in these situations. I actually use this small utility function instead of flow control.
If you want a full-blown flow control solution, I would recommend futuresJS.
I find that the async library is the best for things like this. https://github.com/caolan/async#parallel
I can't test this, so forgive me if there are typos. I refactored your query logic to be reusable: calling queryRows returns a function that matches the task format expected by the async module's parallel(). After both queries are complete, it calls the final callback and passes the results of the two queries as an argument, which you can read to pass to your template.
function queryRows(col, table) {
  return function(cb) {
    var rows = [];
    db.execute('SELECT ' + col + ' FROM ' + table)
      .on('row', function(r) {
        rows.push(r);
      })
      .on('result', function() {
        cb(null, rows); // async expects error-first callbacks
      });
  };
}

app.get('/home', function(req, res) {
  async.parallel({
    users: queryRows('user_name', 'users'),
    titles: queryRows('title', 'code_samples')
  },
  function(err, result) {
    res.render('home.ejs', {
      layout: false,
      locals: {user_name: result.users, title: result.titles}
    });
  });
});
There are some solutions here, but in my opinion the best one is to make the code read synchronously in a very easy way.
You could use the "synchronize" package.
Just
npm install synchronize
Then var sync = require('synchronize');
Put the logic that should be synchronous into a fiber by using
sync.fiber(function() {
  //put your logic here
});
An example for two mysql queries:
var express = require('express');
var bodyParser = require('body-parser');
var mysql = require('mysql');
var sync = require('synchronize');

var db = mysql.createConnection({
  host     : 'localhost',
  user     : 'user',
  password : 'password',
  database : 'database'
});

db.connect(function(err) {
  if (err) {
    console.error('error connecting: ' + err.stack);
    return;
  }
});

function saveSomething() {
  var post = {id: newId};
  //no callback here; the result is in "query"
  var query = sync.await(db.query('INSERT INTO mainTable SET ?', post, sync.defer()));
  var newId = query.insertId;
  post = {foreignKey: newId};
  //this query can be async, because it doesn't matter in this case
  db.query('INSERT INTO subTable SET ?', post, function(err, result) {
    if (err) throw err;
  });
}
When "saveSomething()" is called, it inserts a row in a main table and receives the last inserted id. After that the code below will be executed. No need for nesting promises or stuff like that.
Option one: if all your queries are related to each other, create a stored procedure, put all your data logic into it, and make a single db.execute call.
Option two: if your db uses a single connection, then commands are guaranteed to be executed serially, and you can use this as an async helper:
db.execute(sql1).on('row', function(r) {
  req.session.user_array.push(r.user);
});

db.execute(sql2)
  .on('row', function(r) {
    req.session.title_array.push(r.title);
  })
  .on('end', function() {
    // render data from req.session
  });
You can use fibers to write pseudo-synchronous code with Node.js; take a look at these tests for the DB: https://github.com/alexeypetrushin/mongo-lite/blob/master/test/collection.coffee
They are asynchronous but read as synchronous; more details: http://alexeypetrushin.github.com/synchronize
