Post SQL data to Angular View with NodeJS

I'm having a difficult time posting data retrieved from a server using mysql with node. I have connected to my db successfully, and I can return the data I want by console logging it to the CLI when running "node server.js". However, I'm not sure how to post this data to my Angular view. No problem console logging, but this doesn't help me get data to the application.
For the moment, I'm just trying to get the data to index.html, which is my primary view and holds my ng-view portion for Angular routing. I'm probably missing something obvious because I'm new to NodeJS.
// MODULES
var express = require('express');
var app = express();
var port = process.env.PORT || 3000;
var mysql = require('mysql');
var serveStatic = require('serve-static');
var bodyParser = require('body-parser');

var source = __dirname + '/public/views/index.html';

app.use(serveStatic(__dirname, {'index': ['index.html']}));

app.route('/*')
    .get(function(req, res) {
        res.sendFile(source);
    });

var data;

var connection = mysql.createConnection({
    host     : 'thehostdb',
    user     : 'username', // credentials correct, connection works
    password : 'pw',
    database : 'db',
    port     : '3306'
});

connection.query('SELECT * from poemTable', function(err, rows, fields) {
    if (!err) {
        data = JSON.stringify(rows);
        setDataValue(data);
    } else {
        console.log('Error while performing Query:', err);
    }
});

function setDataValue(value) {
    data = value;
    console.log(data); // Where the data logs
}

app.listen(port, function () {
    console.log('Example app listening on port ' + port + '!');
});

You have to understand what this code does and how Node.js and Angular are supposed to work together. Angular is served to the client and then rendered by the client's browser, so if you want to inject data you have to fetch it. So in your Angular app, make an API call when the controller starts, and in your server create a new route:
app.get('/data', function(req, res, next) {
    connection.query(..., function(err, rows, fields) {
        res.json(rows);
    });
});
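On the Angular side, a minimal sketch of a controller that calls this new endpoint might look like the following (the module and controller names are only illustrative):
// Hypothetical AngularJS controller that fetches the rows exposed by the /data route above
angular.module('myApp').controller('PoemController', function ($scope, $http) {
    $http.get('/data').then(function (response) {
        $scope.poems = response.data; // the rows sent back by res.json(rows)
    }, function (err) {
        console.error('Failed to load data:', err);
    });
});
In the view you can then render the rows with something like ng-repeat="poem in poems".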
Make sure you understand Node and its async nature, what the event loop is and how it works, and what callback hell is. I would also check out promises and the other tutorials on nodeschool.io; it's a great place to start with Node :)

Related

socket.io emit not working properly inside express route

I use socket.io in Node.js and it's working fine, but there is an issue in the routes file.
Below is my code and flow.
I have a server.js file where I define io.
var express = require('express'),
    app = express(),
    http = require('http').Server(app);
var io = require('socket.io').listen(http);

// io from server.js
io.on('connection', function (socket) {
    socket.emit('getDevicePostData', { message : 'Hi from server!!' });
    socket.on('sendDevicePostData', function (data) {});
});

// pass io to routes file
var smart_control = require('./smart_control.js')(io);
app.use('/', smart_control);
And the smart_control.js file code is below:
module.exports = function(io) {
    var router = require('express').Router(); // router needs to be created before it is used

    router.post('/emitdata', function(req, res, next) {
        io.sockets.emit('getDevicePostData', { message : 'Hi from route!!' });
    });

    return router;
};
The above route's emit sometimes works and sometimes does not.
This emit is called from AngularJS. I use the https://www.npmjs.com/package/angular-socket-io package in Angular.
Below is the code in the Angular controller:
mySocket.on('getDevicePostData', function(data) {
    console.log(data);
});
Here mySocket is a factory.
What is wrong in my code? The problem seems to be mainly in the routes file.
You just need to replace io.sockets.emit with io.emit.
Here io.sockets.emit will emit only to currently connected clients, and your socket factory might not be up by the time you hit the API.
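Applied to the route above, the change (plus sending an HTTP response so the POST doesn't hang) could look roughly like this sketch:
module.exports = function (io) {
    var router = require('express').Router();

    router.post('/emitdata', function (req, res, next) {
        io.emit('getDevicePostData', { message : 'Hi from route!!' }); // broadcast to every connected client
        res.json({ emitted: true }); // end the HTTP request so the caller isn't left waiting
    });

    return router;
};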

NodeJS - Export from SQL Server according to Stream

I'm working on a web application in NodeJS, and now I want to add a module for exporting a massive number of records (10,000,000) from my SQL Server database to a .CSV file via a stream/socket.
If 100 users download data (.csv) from the server, memory usage increases a lot on both the server and the client.
If possible, I want to save the data (.csv) to the client's hard drive, not into server/client memory (RAM).
Server.js
var http = require('http');
var express = require('express');
var app = express();
var server = http.Server(app);
var io = require('socket.io')(server);
var sql = require('mssql'); // needed for sql.Connection / sql.Request below

io.on('connection', function (socket) {
    console.log('Connection is ready!');
    socket.on('get_records', function (data) {
        var connection = new sql.Connection(config, function (err) {
            if (err) {
                console.log(err.message);
            }
            var request = new sql.Request(connection);
            request.stream = true;
            request.query("Select * from my_table");
            // ... error checks
            request.on('recordset', function (recordset) {
                // Emitted once for each recordset in a query
            });
            request.on('row', function (record) {
                // Emitted for each row in a recordset
                socket.emit('recieve_records', record); // send record by record to client
            });
            request.on('error', function (err) {
                console.log(err.message);
            });
            request.on('done', function (returnValue) {
                // Always emitted as the last one
            });
        });
    });
});
Edit: See below post
StreamSaver.js - Error in downloading (Network failed)
If you transfer a file using socket.io there is no easy/robust way to initiate the download dialog. I found 3 solutions to save the file:
1. This answer. But you'll have to keep the whole file in RAM before saving, and it has a max blob size restriction.
2. FileSaver module. Same idea, wrapped into a module (5k stars on GitHub). Still restricted by blob size, and keeps everything in memory.
3. StreamSaver module. Doesn't have a blob size restriction, but doesn't work at all in Firefox, IE, or Edge.
That is why I suggest you use plain HTTP for file transfers.
Then you could simply use an <a href="path/to/your/endpoint"> tag to download it, or use some tricks from here.
So in case you have a Node.js Readable stream that emits objects, you can use the 'csv' module to convert it to CSV on the fly, then simply pipe it to the Express response object.
var sql = require('mssql');
var csv = require('csv');
var router = require('express').Router();

router.get('/csv', function (req, res, next) {
    // Handle the connection here; you might decide to use a connection pool if supported
    var connection = new sql.Connection(config, function (err) {
        if (err) {
            console.log(err.message);
        }
        // You can pipe the mssql request as per the docs
        var request = new sql.Request(connection);
        request.stream = true;
        request.query("Select * from my_table");
        var stringifier = csv.stringify({header: true});

        // Then simply call attachment and pipe it to the response
        res.attachment('sample.csv');
        request.pipe(stringifier).pipe(res);
    });
});
Also check out the csv-stringify docs, as there are useful options such as header: true (to add a header row) and others.
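On the client, triggering the download is then just a matter of navigating to that endpoint; an illustrative snippet, assuming the /csv route above is mounted at the root:
// Letting the browser request the endpoint streams the CSV straight to disk
// instead of buffering it in the client's RAM.
$scope.downloadCsv = function () {
    window.location.href = '/csv';
};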

Asynchronous calls with AngularJS and Node.js

New to AngularJS and Node.js. Please advise.
Because the data I want to display on the page takes quite some time to load, I decided to load the fast data from database_1 first, and then get the slow response from database_2 later. Here is my AngularJS:
var app = angular.module('myApp', []);

app.factory('rateFactory', function ($http) {
    return $http.get("localhost/rate"); // api or node.js to return only issueId and rate
});

app.controller('SignupController', function ($scope, $http, $filter, rateFactory) {
    // Display most of the content first
    $scope.showData = function () {
        $http.get("localhost/signup") // api or node.js
            .success(function (response) {
                $scope.Signups = response;
                $scope.curPage = 0;
                $scope.pageSize = 25;
                $scope.numberOfPages = function () {
                    return Math.ceil($scope.Signups.length / $scope.pageSize);
                };
            })
            .error(function (data, status, headers, config) {
                alert(status);
            });
    };

    // Display slow response, Rate, later based on the issueId
    $scope.showRate = function (issueId) {
        rateFactory
            .success(function (data) {
                document.getElementById(issueId).innerHTML = data.find(x => x.IssueID === issueId).Rate;
            })
            .error(function (data, status, headers, config) {
                //alert(status);
            });
    };
});
I wonder whether there is a better way to do this. That is my first question.
The next question is about Node.js. If I get the data from an ashx handler or API, it returns the data without any problem. But when using Node.js for both calls, it's hit and miss: sometimes it works fine, but most of the time the second call fails. Am I doing something wrong? Both return the data perfectly if called individually. Here is the Node.js code:
var express = require('express');
var http = require('http');
var app = express();
var usp2Json = require('./lib/usp2Json.js');

app.get('/iisnode/myApp/signup', function(req, res) {
    usp2Json.getJsonFromStoredProc('stroedprocToGetSignup', req, res);
});

app.get('/iisnode/myApp/rate', function(req, res) {
    usp2Json.getJsonFromStoredProc('stroedprocToGetRate', req, res);
});

var server = http.createServer(app);
var port = process.env.PORT || 593;

server = app.listen(port, function() {
    console.log('Server is running...');
});
usp2Json.js is a custom module to get data from SQL Server with stored procedures:
exports.getJsonFromStoredProc = function(storedproc, req, res) {
    var sql = require("mssql");
    var express = require('express');
    var app = express();

    res.header('Access-Control-Allow-Origin', '*');
    res.header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS');
    res.header('Access-Control-Allow-Headers', 'Content-Type, Authorization, Content-Length, X-Requested-With');

    // config database
    var config = {
        user: 'username',
        password: 'password',
        server: 'servername',
        database: 'databasename',
    };

    // connect to database
    sql.connect(config, function(err) {
        if(err) console.log(err);

        // create Request object
        var request = new sql.Request();

        // query to the database and get the records
        request.query(storedproc, function(err, recordset) {
            if(err)
                console.log(err);

            // send records as a response
            res.send(recordset);
        });
    });
};
One suggestion I have is to get rid of:
document.getElementById(issueId).innerHTML = data.find(x => x.IssueID === issueId).Rate;
and use regular Angular two-way data binding instead: bind your #issueId element to a scope variable and update that scope variable on call success. The way it is done right now is something of an anti-pattern.
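A rough sketch of that binding approach, keeping the existing rateFactory (the rates object and the template expression below are assumptions, not part of the original code):
// Controller: store rates keyed by issue id instead of touching the DOM directly
$scope.rates = {};
$scope.showRate = function (issueId) {
    rateFactory.success(function (data) {
        var match = data.find(function (x) { return x.IssueID === issueId; });
        if (match) {
            $scope.rates[issueId] = match.Rate; // Angular re-renders the bound element automatically
        }
    });
};
In the template you would then display it with something like {{ rates[signup.IssueID] }} instead of writing innerHTML by hand.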
In terms of the NodeJS API call, you will need to show us what the route handler, i.e. usp2Json.getJsonFromStoredProc, does in its code. Otherwise your code looks perfectly fine.
In terms of the NodeJS API call, the issue is actually the SQL Server connection. When I looked at console.log, it didn't give me enough information; it simply said "Server is running". I had to add a line to get the details of the error:
request.query(storedproc, function (err, recordset) {
    if (err) {
        fs.appendFile("path" + datetime + ".txt", time + " Error on executing " + storedproc + " - " + err + " \r\n");
        //throw (err);
    }
    // send records as a response
    res.send(recordset);
});
This gives me "ConnectionError: Connection is closed.". With this information, I was able to find the solution here:
https://github.com/patriksimek/node-mssql/issues/138
and in this answer from Stack Overflow: How can I use a single mssql connection pool across several routes in an Express 4 web application?
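The gist of those links is to open the connection once and reuse it across routes, instead of calling sql.connect inside every request. A minimal sketch of that idea, using the same callback-style mssql API as the code above (the module and variable names are assumptions):
// db.js - hypothetical shared-connection module
var sql = require('mssql');
var config = { user: 'username', password: 'password', server: 'servername', database: 'databasename' };

// Connect once at startup; every later query reuses this connection
var ready = new Promise(function (resolve, reject) {
    sql.connect(config, function (err) {
        if (err) { return reject(err); }
        resolve();
    });
});

exports.query = function (statement, callback) {
    ready.then(function () {
        new sql.Request().query(statement, callback);
    }, callback);
};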

How to retrieve response from server using angularjs?

I am sending a filename to the server and trying to get some sort of response to make sure the API calls are working, but it always returns 404 even though the path is correct. I am not sure where I am making a mistake. Any idea?
angularjsFactory.js
getFile: function(file_name) {
    return $http.get("/file?file_name=" + file_name);
}
server.js
var express = require('express');
var app = express();
var fs = require('fs');

app.use(express.static(__dirname + "/public"));
var readDirectory = require('./readDirectory');

app.get('/file', function(req, res) {
    var path = './ditLogs';
    var fileParam = req.query.file_name;
    console.log(fileParam);
    fs.readdir(path, function(err, items) {
        items.forEach(function(items) {
            if (fileParam === items) {
                res.sendFile(items);
                console.log(items);
            }
        });
    });
});

app.listen(3000, function() {
    console.log('Example app listening on port 3000!');
    //console.log('app is printing data',obj);
    //ditProducer.startProducer();
    setTimeout(function() {
        ditconsumer.start();
    }, 2000);
});
Your route on the server is not correct compared to the call from Angular's side. You are not using route parameters, but a query string.
Try this:
app.get('/file', function (req, res) {
    // get filename using: req.query.file_name
    res.send('File data');
});
Or you can try this in Angular and keep the server code:
$http.get("/file/" + encodeURI(file_name));
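If you go the route-parameter way instead, a minimal sketch of both sides might look like this (the /file/:file_name route is an assumption made to match that client call):
// server.js - route parameter variant
app.get('/file/:file_name', function (req, res) {
    console.log(req.params.file_name); // the filename arrives as a URL segment
    res.send('File data');
});

// angularjsFactory.js - matching client call
getFile: function (file_name) {
    return $http.get('/file/' + encodeURIComponent(file_name));
}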
Server-side code looks incorrect.
app.get('/file', function (req, res) {
    console.log(req.query.file_name); // This will give you the filename passed in the URL
    res.send('File data');
});

How to create a RESTful API with Express and Node.js?

I am very new to Node and Express. I read some documentation but did not get any solid understanding of how to create a REST API with Node. With the basic code below, I just want to create a GET API with Express and return the response to an AngularJS factory method. I would like help and a better understanding of the following:
1- How to return a response with the GET API?
2- If we have a JSON object, how can I pass that data using the GET API?
app.js
var express = require('express');
var path = require('path');
var app = express();

app.use(express.static('./'));

var server = app.listen(3000, function() {
    var host = server.address().address;
    var port = server.address().port;
    console.log('Example app listening at http', host, port);
});

app.get('/test', function(req, res) {
    res.type('text/plain'); // set content-type
    res.send('i am a beautiful butterfly'); // send text response
});
workerController.js
$scope.getTestData = function() {
    alert('got function working');
    workerFactory.getData().then(function(response) {
        var dataResponse = response.data;
        console.log(dataResponse);
    });
};
workerFactory.js
angular.module('myApp').factory('workerFactory', function ($http) {
    'use strict';
    return {
        getData: function() {
            return $http.get('/test');
        }
    };
});
For the second part (how to pass a JSON object back), you can change your API code to something like:
app.get('/test', function(req, res) {
    res.json({message: 'i am a beautiful butterfly'}); // send a JSON response
});
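On the Angular side, the existing factory call then receives that object in response.data; for example, building on the workerController.js code above:
workerFactory.getData().then(function (response) {
    console.log(response.data.message); // "i am a beautiful butterfly"
});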
I'm just working on the first part of the question.
You can get fully working RESTful API code by using Amplication; it's an open-source tool for generating Node.js code by only defining your data model.
As you have a basic understanding of REST APIs, you can use this Node module https://github.com/swagger-api/swagger-node for creating great REST APIs.
