I work on a web application in NodeJS, and now I want to add a module that exports a massive number of records (10,000,000) from my SQL Server database to a .CSV file using a stream/socket.
If 100 users are downloading data (.csv) from the server, memory usage rises sharply on both server and client.
If possible, I want to save the data (.csv) to the client's hard drive, not into server or client memory (RAM).
Server.js
var http = require('http');
var express = require('express');
var sql = require('mssql');
var app = express();
var server = http.Server(app);
var io = require('socket.io')(server);

// config: your SQL Server connection settings (defined elsewhere)

io.on('connection', function (socket) {
    console.log('Connection is ready!');
    socket.on('get_records', function (data) {
        var connection = new sql.Connection(config, function (err) {
            if (err) {
                console.log(err.message);
            }
            var request = new sql.Request(connection);
            request.stream = true;
            request.query("Select * from my_table");
            // ... error checks
            request.on('recordset', function (recordset) {
                // Emitted once for each recordset in a query
            });
            request.on('row', function (record) {
                // Emitted for each row in a recordset
                socket.emit('receive_records', record); // send record by record to client
            });
            request.on('error', function (err) {
                console.log(err.message);
            });
            request.on('done', function (returnValue) {
                // Always emitted as the last one
            });
        });
    });
});
Edit: see the follow-up post:
StreamSaver.js - Error in downloading (Network failed)
If you transfer a file using socket.io, there is no easy/robust way to trigger the browser's download dialog. I found three ways to save the file:
This answer. But you'll have to keep the whole file in RAM before saving, and it has a maximum blob size restriction.
The FileSaver module. Same idea, wrapped into a module (5k stars on GitHub). Still restricted by blob size, and it keeps everything in memory.
The StreamSaver module. No blob size restriction, but it doesn't work at all in Firefox, IE, or Edge.
That is why I suggest you use plain HTTP for file transfers.
Then you can simply use an <a href="path/to/your/endpoint"> tag to download it, or use some tricks from here.
So if you have a Node.js Readable stream that emits objects, you can use the 'csv' module to convert it to CSV on the fly and simply pipe the result into the Express response object.
var csv = require('csv');
var sql = require('mssql');
// assumes an Express router in scope, e.g. var router = express.Router();

router.get('/csv', function (req, res, next) {
    // Handle the connection here; you might decide to use a connection pool if supported
    var connection = new sql.Connection(config, function (err) {
        if (err) {
            console.log(err.message);
        }
        // You can pipe an mssql request as per the docs
        var request = new sql.Request(connection);
        request.stream = true;
        request.query("Select * from my_table");
        var stringifier = csv.stringify({header: true});
        // Then simply call attachment and pipe it to the response
        res.attachment('sample.csv');
        request.pipe(stringifier).pipe(res);
    });
});
Also check out the csv-stringify docs; there are useful options such as header: true (to add a header row) and others.
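For example, a minimal sketch of a stringifier with a couple of those options (the column names here are hypothetical):
// Sketch only: csv-stringify with a header row, an explicit column
// selection and a non-default delimiter (id/name are made-up columns)
var stringifier = csv.stringify({
    header: true,            // emit a header row first
    columns: ['id', 'name'], // restrict and order the output columns
    delimiter: ';'           // optional: use a non-default delimiter
});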
Related
I am using the following from http://www.tutorialsteacher.com/nodejs/access-sql-server-in-nodejs:
var express = require('express');
var app = express();

app.get('/', function (req, res) {
    var sql = require("mssql");

    // config for your database
    var config = {
        user: 'sa',
        password: 'mypassword',
        server: 'localhost',
        database: 'SchoolDB'
    };

    // connect to your database
    sql.connect(config, function (err) {
        if (err) console.log(err);

        // create Request object
        var request = new sql.Request();

        // query to the database and get the records
        request.query('select * from Student', function (err, recordset) {
            if (err) console.log(err);

            // send records as a response
            res.send(recordset);
        });
    });
});

var server = app.listen(5000, function () {
    console.log('Server is running..');
});
When I run this file in the browser, the page loads the first time. But if I refresh it, it says the connection is not open. When I run this outside of the webserver context, it never exits the sql.connect function and has to be stopped with Ctrl-C in Node. Does anyone know why this code gets stuck in the sql.connect function?
First: the example you were using is based on version 2.3 of node-mssql; the current version is now 4.1. One recommended way of using node-mssql is through connection pools, so I adapted your code to use a pool.
Second: if there is an error, your code never reaches the res.send() call, so I modified it to send something back in the error case too.
One more hint: I would place the dependency at the top of your app (not within the route). It works either way, but it makes your code a little clearer.
'use strict';
const express = require('express');
const app = express();
const sql = require("mssql");

// config for your database
const config = {
    user: 'sa',
    password: 'mypassword',
    server: 'localhost',
    database: 'SchoolDB',
    options: {
        encrypt: false
    },
    pool: {
        max: 10,
        min: 0,
        idleTimeoutMillis: 30000
    }
};

// create a connection pool
const pool = new sql.ConnectionPool(config, err => {
    if (err) {
        console.log(err);
    }
});

app.get('/', function (req, res) {
    // create Request object (using the connection pool)
    const request = new sql.Request(pool);

    // query to the database and get the records
    request.query('select * from Student', (err, recordset) => {
        if (err) {
            console.log(err);
            res.send(err);
        } else {
            // send records as a response
            res.send(recordset);
        }
    });
});

var server = app.listen(5000, function () {
    console.log('Server is running..');
});
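One caveat with the sketch above (again, untested): the pool connects asynchronously, so a request that arrives before it has connected will fail. In node-mssql 4 the pool's connect() returns a promise, so an alternative is to create the pool without the callback and only start listening once it resolves:
// Sketch only: start accepting requests once the pool has connected
const pool = new sql.ConnectionPool(config);
pool.connect()
    .then(() => {
        app.listen(5000, () => console.log('Server is running..'));
    })
    .catch(err => console.log(err));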
Hope that helps (code above is not tested ...)
I'm having a difficult time serving data retrieved with mysql in Node. I have connected to my db successfully, and I can see the data I want by console-logging it to the CLI when running node server.js. However, I'm not sure how to get this data into my Angular view. Console logging is no problem, but it doesn't help me get the data into the application.
For the moment, I'm just trying to get the data to index.html, which is my primary view and holds the ng-view portion for Angular routing. I'm probably missing something obvious because I'm new to NodeJS.
// MODULES
var express = require('express');
var app = express();
var port = process.env.PORT || 3000;
var mysql = require('mysql');
var serveStatic = require('serve-static');
var bodyParser = require('body-parser');

var source = __dirname + '/public/views/index.html';

app.use(serveStatic(__dirname, {'index': ['index.html']}));
app.route('/*')
    .get(function (req, res) {
        res.sendFile(source);
    });

var data;
var connection = mysql.createConnection({
    host     : 'thehostdb',
    user     : 'username', // credentials correct, connection works
    password : 'pw',
    database : 'db',
    port     : '3306'
});

connection.query('SELECT * from poemTable', function (err, rows, fields) {
    if (!err) {
        data = JSON.stringify(rows);
        setDataValue(data);
    } else {
        console.log('Error while performing Query:', err);
    }
});

function setDataValue(value) {
    data = value;
    console.log(data); // Where the data logs
}

app.listen(port, function () {
    console.log('Example app listening on port ' + port + '!');
});
You have to understand what this code does and how Node.js and Angular are supposed to work together. Angular is served to the client and then rendered by the client's browser, so if you want to inject data you have to fetch it: in your Angular app, make an API call when the controller starts, and in your server create a new route (a client-side sketch follows the route below):
app.get('/data', function (req, res, next) {
    connection.query(..., function (err, rows, fields) {
        res.json(rows);
    });
});
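On the Angular side, a minimal sketch of the matching call (the controller and scope names are made up):
// Hypothetical controller: fetch the rows when it starts
angular.module('myApp').controller('PoemController', function ($scope, $http) {
    $http.get('/data').then(function (response) {
        $scope.poems = response.data; // rows from the /data route above
    });
});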
Make sure you understand Node and its async nature: what the event loop is and how it works, and what callback hell is. I would also check out promises and the other tutorials on nodeschool.io; it's a great place to start with Node :)
New to AngularJS and Node.js. Please advise.
Because the data I want to display on the page takes quite some time to load, I decided to load the fast data from database_1 first, and then get the slow response from database_2 later. Here is my AngularJS:
var app = angular.module('myApp', []);

app.factory('rateFactory', function ($http) {
    return $http.get("localhost/rate"); // api or node.js to return only issueId and rate
});

app.controller('SignupController', function ($scope, $http, $filter, rateFactory) {
    // Display most of the content first
    $scope.showData = function () {
        $http.get("localhost/signup") // api or node.js
            .success(function (response) {
                $scope.Signups = response;
                $scope.curPage = 0;
                $scope.pageSize = 25;
                $scope.numberOfPages = function () {
                    return Math.ceil($scope.Signups.length / $scope.pageSize);
                };
            })
            .error(function (data, status, headers, config) {
                alert(status);
            });
    };

    // Display slow response, Rate, later based on the issueId
    $scope.showRate = function (issueId) {
        rateFactory
            .success(function (data) {
                document.getElementById(issueId).innerHTML = data.find(x => x.IssueID === issueId).Rate;
            })
            .error(function (data, status, headers, config) {
                //alert(status);
            });
    };
});
I wonder whether there is a better way to do this; that's my first question.
My next question is about Node.js. If I get the data from an ashx or API, it returns the data without any problem. But when I use Node.js for both calls, it's hit and miss: sometimes it works fine, but most of the time the 2nd call fails. Am I doing something wrong? Both calls return the data perfectly when made individually. Here is the Node.js code:
var express = require('express');
var http = require('http');
var app = express();
var usp2Json = require('./lib/usp2Json.js');

app.get('/iisnode/myApp/signup', function (req, res) {
    usp2Json.getJsonFromStoredProc('storedprocToGetSignup', req, res);
});

app.get('/iisnode/myApp/rate', function (req, res) {
    usp2Json.getJsonFromStoredProc('storedprocToGetRate', req, res);
});

var server = http.createServer(app);
var port = process.env.PORT || 593;
server = app.listen(port, function () {
    console.log('Server is running...');
});
usp2Json.js is a custom module that gets data from SQL Server via a stored procedure:
exports.getJsonFromStoredProc = function (storedproc, req, res) {
    var sql = require("mssql");

    res.header('Access-Control-Allow-Origin', '*');
    res.header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS');
    res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization, Content-Length, X-Requested-With');

    // config database
    var config = {
        user: 'username',
        password: 'password',
        server: 'servername',
        database: 'databasename'
    };

    // connect to database
    sql.connect(config, function (err) {
        if (err) console.log(err);

        // create Request object
        var request = new sql.Request();

        // query to the database and get the records
        request.query(storedproc, function (err, recordset) {
            if (err) console.log(err);

            // send records as a response
            res.send(recordset);
        });
    });
};
One suggestion I have is to get rid of:
document.getElementById(issueId).innerHTML = data.find(x => x.IssueID === issueId).Rate;
And use regular Angular two-way data binding instead: bind your #issueId element to a scope variable and update that variable on call success. The way it is done right now is something of an anti-pattern; a sketch follows.
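A rough sketch of what that could look like, reusing your rateFactory (the rates map is an assumption):
// Sketch only: keep rates on the scope and let Angular render them
$scope.rates = {};
$scope.showRate = function (issueId) {
    rateFactory.success(function (data) {
        var match = data.find(function (x) { return x.IssueID === issueId; });
        if (match) {
            $scope.rates[issueId] = match.Rate; // the view binds to rates[issueId]
        }
    });
};
In the template you would then bind with something like {{rates[issue.IssueID]}} instead of writing to innerHTML by id.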
In terms of the NodeJS API call, you will need to show us what the route handler, i.e. usp2Json.getJsonFromStoredProc, does in its code. Otherwise your code looks perfectly fine.
In terms of the NodeJS API call, the issue was actually the SQL Server connection. When I looked at the console.log output it didn't give me enough information; it simply said "Server is running". I had to add a line to get the details of the error:
request.query(storedproc, function (err, recordset) {
    if (err) {
        // append the error details to a log file
        // (fs, datetime and time are defined elsewhere in the module)
        fs.appendFile("path" + datetime + ".txt", time + " Error on executing " + storedproc + " - " + err + " \r\n");
        //throw (err);
    }
    // send records as a response
    res.send(recordset);
});
This gave me "ConnectionError: Connection is closed.". With this information, I was able to find the solution here:
https://github.com/patriksimek/node-mssql/issues/138
and this answer from Stack Overflow: How can I use a single mssql connection pool across several routes in an Express 4 web application?
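The gist of that answer, as a rough sketch (the file and route names here are illustrative): create the pool once in its own module and require it from every route, instead of calling sql.connect() per request:
// db.js -- sketch only: one shared connection pool for the whole app
const sql = require('mssql');
const config = { /* user, password, server, database */ };
const poolPromise = new sql.ConnectionPool(config).connect();
module.exports = { sql, poolPromise };

// in a route handler:
const { poolPromise } = require('./db');
app.get('/iisnode/myApp/signup', (req, res) => {
    poolPromise
        .then(pool => pool.request().query('storedprocToGetSignup'))
        .then(result => res.send(result.recordset))
        .catch(err => {
            console.log(err);
            res.status(500).send(err.message);
        });
});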
I'm trying to pass multiple parameters in a URL, with no luck. I'm not sure if it makes a difference, but I am doing it through Angular. I'm sending the request to a REST API backend that I know works for single-parameter requests. Here is what my backend looks like:
index.js
var express = require('express');
var router = express.Router();
var game = require('./game');

router.get('/api/v1/gameRefined/:from_datepicker:to_datepicker:from_timepicker:to_timepicker:selectLevel', game.getAllRefined);

module.exports = router;
game.js
...dbconnection stuff...
var game = {
    getAllRefined: function (req, res) {
        var refines = req.params;
        console.log("getting refined games");
        pool.getConnection(function (err, connection) {
            var query = connection.query(
                'SELECT * FROM game WHERE date >= ? AND date <= ? AND time >= ? AND time <= ? and level = ?',
                [refines.from_datepicker, refines.to_datepicker, refines.from_timepicker, refines.to_timepicker, refines.selectLevel],
                function (err, rows) {
                    connection.release();
                    if (err) {
                        throw err;
                    } else {
                        res.json(rows);
                    }
                });
        });
    },
};

module.exports = game;
I send the request from this factory
.factory('gameFactory', ['$http',
    function ($http) {
        var _gameFactory = {};

        _gameFactory.getRefinedGames = function (dateFrom, dateTo, timeFrom, timeTo, level) {
            var encodedParam = encodeURIComponent(baseUrl + '/api/v1/gameRefined/?from_datepicker=' + dateFrom + '&to_datepicker=' + dateTo + '&from_timepicker=' + timeFrom + '&to_timepicker=' + timeTo + '&selectLevel=' + level);
            return $http.get(encodedParam);
        };

        return _gameFactory;
    }])
That sends this request, which comes back as a 404:
http://localhost:8100/http%3A%2F%2Flocalhost%3A3000%2Fapi%2Fv1%2FgameRefined%2F%3Ffrom_datepicker%3D2015-02-05%26to_datepicker%3D2015-02-19%26from_timepicker%3D12%3A00%26to_timepicker%3D18%3A00%26selectLevel%3D1
I have tried it encoded and not encoded, with forward slashes, with semicolons, but nothing has worked so far.
I don't know why a localhost gets prepended at the start, but even trying it in Postman without the first localhost still gives a 404 error.
How should I be doing this? Any help is appreciated. Thank you.
First, other than separating the work into a factory, you aren't really using Angular here. Second, I would consider POSTing to your API, returning JSON results from it, and then using a promise to read the data back into Angular.
Use $http.post
Let Angular do this work for you. Something like the code below will generate the URL request for you. The return value of $http is also a promise, so using .success and .error will let you parse any returned data, even if it is just a success or failure message; it is a great method of passing data between server/API and client.
.factory('gameFactory', ['$http',
    function ($http) {
        return {
            reachAPI: function (dateFrom, dateTo) {
                // return the promise so callers can chain .success/.error
                return $http.post('http://localhost:8080/api/v1', {
                    'dateFrom': dateFrom,
                    'dateTo': dateTo
                })
                .success(function (data, status, headers, config) {
                    // things happen
                    // data.somethingReturned -- from your API
                });
            }
        };
    }])
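For completeness, a sketch of calling that factory from a controller (the controller name and dates are made up):
// Hypothetical controller using the factory above
.controller('gameController', ['$scope', 'gameFactory',
    function ($scope, gameFactory) {
        gameFactory.reachAPI('2015-02-05', '2015-02-19')
            .success(function (data) {
                $scope.games = data; // bind whatever the API returned
            });
    }])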
Consider body-parser
I know you said you have confidence in your REST API structure, but body-parser is Express middleware that parses URL-encoded strings and JSON bodies, and it may prove helpful in reading your data. Personally, I think it lends itself to more readable code.
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
var router = express.Router();

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(router);

router.route('/api').post(function (req, res) {
    // things happen
    var dateFrom = req.body.dateFrom; // find your data
    // things happen
    res.json(returnedData); // returnedData: whatever you build from req.body
});
Hope that helps
I have a busy indicator that appears if the HTTP request takes longer than 1 second. If it returns in less than 1 second, the busy indicator does not appear.
What's the best way to test this with Protractor? I'm thinking I need two test cases:
it('should not show busy indicator because http response is quick', function () {
    // ??
});

it('should show busy indicator because http response is taking longer than 1 second', function () {
    // ??
});
One way to do this is to set up a configurable proxy server that the browser launched by Protractor uses. This proxy server can add latency to requests, thus simulating a slow internet connection/network (or at least, simulating it closely enough for your purposes).
Such a proxy server, plus a mini-API to make it configurable, looks like this:
// proxy.js

// Config ports
var PROXY_PORT = 8001;
var CONFIG_PORT = 8002;

// Latency used for all requests
var latency = 0;

// Proxy
var hoxy = require('hoxy');
var proxy = new hoxy.Proxy();
proxy.intercept('request', function (req, resp) {
    req.slow({latency: latency});
});
proxy.listen(PROXY_PORT);

// API server for tests to configure latency
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
app.use(bodyParser.json());
app.post('/', function (req, res) {
    latency = req.body.latency;
    res.send('');
});
app.listen(CONFIG_PORT);
The above requires the npm packages hoxy, express and body-parser, each of which can be installed with a command like
npm install hoxy --save-dev
The above server, assuming it's saved to proxy.js, can be launched on the command line by
node proxy.js
Note that it listens on two ports: 8001 for a simple HTTP proxy that adds latency to requests, and 8002 for a simple API listening on / that accepts a JSON object with a single key, latency: the number of milliseconds by which to slow all subsequent requests through the proxy.
To use the proxy in Protractor, you can add it to its configuration file, for example for Chrome, by:
capabilities: {
    browserName: 'chrome',
    proxy: {
        proxyType: 'manual',
        httpProxy: 'localhost:8001'
    }
}
Then in your spec file, you can create a function to set the latency for subsequent requests by
describe('My Component', function () {
    var http = require('http');

    function setLatency(latency) {
        return browser.controlFlow().execute(function () {
            var deferred = protractor.promise.defer();
            var data = JSON.stringify({latency: latency});
            var req = http.request({
                method: 'POST',
                host: 'localhost',
                port: 8002,
                headers: {
                    'Content-Type': 'application/json',
                    'Content-Length': data.length
                }
            }, function () {
                deferred.fulfill();
            });
            req.write(data);
            req.end();
            return deferred.promise;
        });
    }
...
Note that we add the code to the control flow and return a promise that is resolved once the request to set the latency has completed. This means we can use it in the same natural way as other Protractor element finders/expectations, without having to use manual then callbacks.
Your actual tests depend on exactly how you're showing the busy indicator, but you should be able to do something like the following:
it('should not show busy indicator because http response is quick', function () {
    setLatency(900);
    element(by.css('button')).click();
    browser.ignoreSynchronization = true;
    browser.sleep(500);
    expect(element(by.css('.loading-indicator')).isPresent()).not.toBe(true);
    browser.ignoreSynchronization = false;
    setLatency(0);
});

it('should show busy indicator because http response is taking longer than 1 second', function () {
    setLatency(2000);
    element(by.css('button')).click();
    browser.ignoreSynchronization = true;
    browser.sleep(1000);
    expect(element(by.css('.loading-indicator')).isPresent()).toBe(true);
    browser.ignoreSynchronization = false;
    setLatency(0);
});
The above tests aren't perfect. Specifically, the test that the busy indicator doesn't show doesn't actually prove that it never appears; it only checks that it hasn't appeared 500ms into a 900ms request.
Note the setting of browser.ignoreSynchronization = true. If we didn't set it to true, the test wouldn't progress until all $http requests had finished, which we don't want in this case, as we're testing the behaviour of what happens during the request.