Design pattern for shared database handle in Node.JS - database

How do I share a database connection across various NodeJS modules? The examples I have found all had a monolithic structure where the entire code was in a single app.js file.
/* main.js */
var foo = require("./foo");
/* express stuff ... */
var mysql = /* establish mysql connection */
app.get("/foo", foo.hello);

/* foo.js */
exports.hello = function(req, res) {
  res.send("Hello from the foo module!");
};
How would I access "mysql" from my module "foo"? What is the recommended design pattern for this?

You can use the module pattern to easily pass your db object (and anything else) to modules that need it.
// users.js
module.exports = function( options ) {
  var db = options.db;
  var pants = options.pants; // or whatever

  return {
    GetUser: function( userID, callback ) {
      db.query("....", function (err, results) {
        callback(results);
      });
    },
    AnotherFunc: function (...) {},
    AndAnotherFunc: function (...) {}
  };
};
You use this module like:
// make your db connection here
var users = require('./users.js')({
  db: db,
  pants: 'blue'
});

users.GetUser( 32, function( user ) {
  console.log("I got the user!");
  console.log( user );
});
I find this to be a great way to write modules, as it's a lot like creating an actual class, as in C++. You can even simulate 'private' methods/parameters.

I usually put the mysql handle in a separate file (module) and require that module in the different routes.
I believe you also have to connect to mysql asynchronously; you can refer to this question, which uses a callback function to solve the problem.
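A minimal sketch of that approach, assuming the mysql npm package and placeholder file names and credentials, might look like this:

// db.js - a shared module that owns the single connection (hypothetical file)
var mysql = require('mysql');

var connection = mysql.createConnection({
  host: 'localhost',   // placeholder credentials
  user: 'app',
  password: 'secret',
  database: 'mydb'
});

connection.connect(function (err) {
  if (err) console.error('Could not connect to MySQL:', err);
});

module.exports = connection;

// foo.js - any route module can require the same handle
var db = require('./db');

exports.hello = function (req, res) {
  db.query("SELECT 'Hello from the foo module!' AS msg", function (err, rows) {
    if (err) return res.send('DB error: ' + err.message);
    res.send(rows[0].msg);
  });
};

Because Node caches modules, every require('./db') returns the same connection object, so the handle is effectively shared across routes.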

Related

Can't figure out where to initiate CronJob in react app

I have a React app which must perform a weekly task every Monday at 7:58 am. The task is set up as a separate function notification(), and I want to use the 'cron' package from NPM to call notification() at the appropriate time.
I have CRON wrapped inside of a function like this:
let mondayNotif = () => {
  new CronJob('* 58 7 * * 2', function() {
    notification()
  }, null, true, 'America/Los_Angeles');
};
My question: where should I call the function mondayNotif(), to make sure that the CronJob is initiated correctly? I thought at first it must be on the backend, but the NPM package doesn't seem to support server-side. But if I call mondayNotif() on the client side, will the CronJob still happen if the site is inactive?
From what I know, React JS is front end; it runs on the client side. You need a server, in this case a Node.js based server. Theoretically, if nobody opens the website, nothing will be fired up in React JS. Look up how to schedule cron jobs in Node.js.
I found my own answer. But first, a few insights on CronJobs that would have helped me:
CronJobs are essentially functions with an embedded clock. Once they are "initiated", you don't have to call them; the scheduler invokes them for you, based on the time pattern you passed in the parameters (e.g. "30 6 * * 5").
There is some discrepancy between blogs about the CRON time pattern. For instance, some blogs insisted there are 6 time fields, but I found it worked with 5 (see the short sketch after these notes).
CronJobs should be in a separate file from the body of your main code, typically at the top of your folder structure near your "package.json" & "server.js" files.
It seems to be cleanest to set up all of your CronJob utilities directly inside the cronjob.js file. For instance, I used a separate database connection directly in cronjob.js and bypassed the API routes completely.
CronJobs should be initiated exactly once, at the beginning of the app launch. There are a few ways to do this: package.json or server.js are the most obvious choices.
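For what it's worth, the 5 vs 6 field discrepancy seems to come from the optional leading seconds field supported by the cron package. A small sketch (the times are placeholders; notification() is the function from the question):

const CronJob = require('cron').CronJob;

// 5 fields: minute hour day-of-month month day-of-week
// fires every Monday at 07:58
new CronJob('58 7 * * 1', () => notification(), null, true, 'America/Los_Angeles');

// 6 fields: an extra leading seconds field
// fires every Monday at 07:58:00 exactly
new CronJob('0 58 7 * * 1', () => notification(), null, true, 'America/Los_Angeles');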
Here is the file structure I ended up using:
-App
--package.json
--server.js
--cronjob.js
--/routes
--/src
--/models
--/public
...And then I imported the cronjob.js into "server.js". This way the cronjob function is initiated one time, when the server.js file is loaded during "dev" or "build".
For reference, here's the raw cronjob.js file (this is for an email notification):
const CronJob = require('cron').CronJob;
const Department = require('./models/department.js');
const template_remind = require('./config/remindEmailTemplate.js');
const SparkPost = require('sparkpost');
const client = new SparkPost('#############################');
const mongoose = require("mongoose");
const MONGODB_URI = process.env.MONGODB_URI || "mongodb://localhost:27017/app";

mongoose.Promise = Promise;

// -------------------------- MongoDB -----------------------------
// Connect to the Mongo DB
mongoose.connect(MONGODB_URI, { useNewUrlParser: true }, (err, db) => {
  if (err) {
    console.log("Unable to connect to the mongoDB server. Error:", err);
  } else {
    console.log("Connection established to", MONGODB_URI);
  }
});

const db = mongoose.connection;

// Show any mongoose errors
db.on("error", error => {
  console.log("Mongoose Error: ", error);
});

// Once logged in to the db through mongoose, log a success message
db.once("open", () => {
  console.log("Mongoose CRON connection successful.");
});

// ------------------------ Business Logic --------------------------
function weekday(notifications) {
  Department.find({ "active": true, "reminders": notifications, "week": { $lt: 13 } }).distinct('participants', function (err, doc) {
    if (err) {
      // console.log("The error: " + err)
    } else {
      console.log("received from database... " + JSON.stringify(doc));
      for (let i = 0; i < doc.length; i++) {
        client.transmissions.send({
          recipients: [{ address: doc[i] }],
          content: {
            from: 'name#sparkmail.email.com',
            subject: 'Your email notification',
            html: template_remind()
          },
          options: { sandbox: false }
        }).then(data => {});
      }
    }
  });
}

function weeklyNotif() {
  new CronJob('45 7 * * 1', function () { weekday(1) }, null, true, 'America/New_York');
  new CronJob('25 15 * * 3', function () { weekday(2) }, null, true, 'America/New_York');
  new CronJob('15 11 * * 5', function () { weekday(3) }, null, true, 'America/New_York');
}

module.exports = weeklyNotif();
As you can see, I set up a unique DB connection and email server connection (separate from my API file), ran all of the logic inside this one file, and then exported the initiation function.
Here's what appears in server.js:
const cronjob = require("./cronjob.js");
All you have to do here is require the file; because the module calls weeklyNotif() when it is first loaded, this automatically initiates the cron jobs.
Thanks for reading. If you have feedback, please share.
No way, do not call CronJob from the client side, because if there are 100 users, the CronJob will be triggered 100 times. You need to have it on the server side for sure.

FeathersJS custom API method that retrieves enum Type, to fill a Dropdown in React

So I'm trying to fill a select component with an enum type from mongoose.
In my user service the schema looks something like:
firstName: { type:String, required: true },
...
ris:{type: String, default: 'R', enum:['R', 'I', 'S']},
In my feathers service I can access the Model with "this.Model"
so in any hook I can do:
this.Model.schema.path('ris').enumValues; // ['R','C','I']
and I get the values from the enum type.
Now, since I can't create custom API methods other than the official ones:
Feathers calling custom API method
https://docs.feathersjs.com/clients/readme.html#caveats
https://docs.feathersjs.com/help/faq.html#can-i-expose-custom-service-methods
How can I create a service method/call/something so that I can call it in my
componentDidMount(){ var optns= this.props.getMyEnumsFromFeathers}
and have the enum ['R','C','I'] to set up my dropdown?
I'm Using React/Redux/ReduxSaga-FeathersJS
I'd create a service for listing Enums in the find method:
class EnumService {
  find(params) {
    const { service, path } = params.query;
    const values = this.app.service(service).Model.schema.path(path).enumValues;

    return Promise.resolve(values);
  }

  setup(app) {
    this.app = app;
  }
}

app.use('/enums', new EnumService());
Then on the client you can do
app.service('enums').find({
  query: {
    service: 'myservice',
    path: 'ris'
  }
}).then(values => console.log('Got ', values));
I was trying to use this code, but it does not work plug and play.
After some playing with the app service, I figured out the code below:
async find(params) {
  const { service, path } = params.query;
  const values = await this.app.service(service).Model.attributes[path].values;
  return values || [];
}

setup(app) {
  this.app = app;
}
I am not sure if it depends on which database is being used; in my case I am in a development environment, so I am using sqlite.

How to edit server stored JSON file with NodeJS

I have a web server hosting over localhost. The website I am accessing is a "Todo list app" written with AngularJS. To load the todos, the browser gets a JSON file with the information. An example of this:
[
  {"name":"Clean the house"},
  {"name":"Water the dog"},
  {"name":"Feed the lawn"},
  {"name":"Pay dem bills"},
  {"name":"Run"},
  {"name":"Swim"}
]
It then loops through all the items and "prints" them out onto the website. I have various options like "Save" and "Delete". They work client-side, but that way does not allow me to properly save them, as when the browser is refreshed, all the content is reset with the server's static JSON file.
I was wondering if there was some way of using NodeJS to host the website, listen for incoming AJAX requests, and edit the content of the file based on that.
Writing a file asynchronously in nodejs can be done as follows.
var fs = require('fs');
var fileName = './file.json';
var file = require(fileName);

file.key = "new value"; // This will be coming as a http POST method from your view

fs.writeFile(fileName, JSON.stringify(file), function (err) {
  if (err) return console.log(err);
  console.log(JSON.stringify(file));
  console.log('writing to ' + fileName);
});
The caveat is that the JSON is written to the file on one line and not prettified. For example:
{
  "key": "value"
}
will be...
{"key": "value"}
To avoid this, simply add these two extra arguments to JSON.stringify
JSON.stringify(file, null, 2)
null - represents the replacer function (in this case we don't want to alter the process).
2 - represents the number of spaces to indent.
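To connect this to the "listen for incoming AJAX requests" part of the question, a minimal sketch with Express could look like the following (the route path, file name and payload shape are assumptions, not part of the original app):

// server.js - hypothetical Express endpoint that persists the todo list
var express = require('express');
var fs = require('fs');
var app = express();

app.use(express.static('public')); // serve the Angular app
app.use(express.json());           // parse JSON request bodies (Express 4.16+)

// the Angular client would POST the full todo array here
app.post('/todos', function (req, res) {
  fs.writeFile('./todos.json', JSON.stringify(req.body, null, 2), function (err) {
    if (err) return res.status(500).send(err.message);
    res.send('saved');
  });
});

app.listen(3000);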
NodeJS does not persist data out of the box.
You want something like NodeJS + Express and special CRUD routes like POST for creating items or DELETE for deleting them.
In these routes you have to add a data persistence layer, such as Mongoose if you want to use MongoDB, or Sequelize if you want to add a SQL database behind it.
Each of these ORMs requires you to specify a data model which can be saved.
Here is an example of a mongoose implementation:
'use strict';

/**
 * Module dependencies.
 */
var mongoose = require('mongoose'),
    Schema = mongoose.Schema;

var plugins = require('./model.server.plugins');

/**
 * Customer Schema
 */
var CustomerSchema = new Schema({
  name: {
    type: String,
    default: '',
    required: 'Please fill a name',
    trim: true
  },
  created: {
    type: Date,
    default: Date.now
  }
});

mongoose.model('Customer', CustomerSchema);
Here is the controller
'use strict';

/**
 * Module dependencies.
 */
var mongoose = require('mongoose'),
    errorHandler = require('./errors.server.controller'),
    Customer = mongoose.model('Customer'),
    _ = require('lodash');

/**
 * Create a Customer
 */
exports.create = function(req, res) {
  var customer = new Customer(req.depopulated);
  customer.user = req.user;

  customer.save(function(err) {
    if (err) {
      return res.status(400).send({
        message: errorHandler.getErrorMessage(err)
      });
    } else {
      res.jsonp(customer);
    }
  });
};
Here is the route:
router.route('/')
  .post(customers.create);

Node JS - Use Printer (Hardware)

I'm developing an application with Angular JS and Node JS where I need to see all available printers on the user's machine, let the user select one of them, and print a receipt.
Is there a way to achieve this?
I did an application like that. I did it using http://nwjs.io/ and the module in the comment: https://www.npmjs.com/package/printer. Here is working code with this module that prints a file raw to the default printer:
var printer = require('printer');
var fs = require('fs');

var info = fs.readFileSync('ticket.txt').toString();

function sendPrint() {
  printer.printDirect({
    data: info,
    type: 'RAW',
    success: function (jobID) {
      console.log("ID: " + jobID);
    },
    error: function (err) {
      console.log('printer module error: ' + err);
      throw err;
    }
  });
}

sendPrint();
You can adapt it to use the getPrinters() method to enumerate all installed printers and then allow the user to select the desired one, as sketched below.
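A hedged sketch of that adaptation (the exact fields returned by getPrinters() may vary by platform, so treat the name handling as an assumption):

var printer = require('printer');

// enumerate installed printers; each entry should expose a name property
var printers = printer.getPrinters();
printers.forEach(function (p, i) {
  console.log(i + ': ' + p.name);
});

// once the user picks one (e.g. from a dropdown in the Angular app),
// pass its name to printDirect instead of relying on the default printer
function printTo(printerName, data) {
  printer.printDirect({
    data: data,
    printer: printerName,
    type: 'RAW',
    success: function (jobID) { console.log('queued job ' + jobID); },
    error: function (err) { console.log('printer module error: ' + err); }
  });
}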

Connection state with doowb/angular-pusher

I am trying to build an Angular project with Pusher using the angular-pusher wrapper. It's working well but I need to detect when the user loses internet briefly so that they can retrieve missed changes to data from my server.
It looks like the way to handle this is to reload the data on Pusher.connection.state('connected'...), but this does not seem to work with angular-pusher; I am receiving "Pusher.connection is undefined".
Here is my code:
angular.module('respondersapp', ['doowb.angular-pusher'])
  .config(['PusherServiceProvider',
    function(PusherServiceProvider) {
      PusherServiceProvider
        .setToken('Foooooooo')
        .setOptions({});
    }
  ]);

var ResponderController = function($scope, $http, Pusher) {
  $scope.responders = [];

  Pusher.subscribe('responders', 'status', function (item) {
    // an item was updated. find it in our list and update it.
    var found = false;

    for (var i = 0; i < $scope.responders.length; i++) {
      if ($scope.responders[i].id === item.id) {
        found = true;
        $scope.responders[i] = item;
        break;
      }
    }

    if (!found) {
      $scope.responders.push(item);
    }
  });

  Pusher.subscribe('responders', 'unavail', function(item) {
    $scope.responders.splice($scope.responders.indexOf(item), 1);
  });

  var retrieveResponders = function () {
    // get a list of responders from the api located at '/api/responders'
    console.log('getting responders');
    $http.get('/app/dashboard/avail-responders')
      .success(function (responders) {
        $scope.responders = responders;
      });
  };

  $scope.updateItem = function (item) {
    console.log('updating item');
    $http.post('/api/responders', item);
  };

  // load the responders
  retrieveResponders();
};
Under this setup, how would I go about monitoring connection state? I'm basically trying to replicate the Firebase "catch up" functionality for spotty connections. Firebase was not working overall for me; it was too confusing trying to manage multiple data sets (I'm not looking to replace my back-end at all).
Thanks!
It looks like the Pusher dependency only exposes subscribe and unsubscribe. See:
https://github.com/doowb/angular-pusher/blob/gh-pages/angular-pusher.js#L86
However, if you access the PusherService you get access to the Pusher instance (the one provided by the Pusher JS library) using PusherService.then. See:
https://github.com/doowb/angular-pusher/blob/gh-pages/angular-pusher.js#L91
I'm not sure why the PusherService provides a level of abstraction and why it doesn't just return the pusher instance. It's probably so that it can add some of the Angular specific functionality ($rootScope.$broadcast and $rootScope.$digest).
Maybe you can set the PusherService as a dependency and access the pusher instance using the following?
PusherService.then(function (pusher) {
  var state = pusher.connection.state;
});
To clarify @leggetter's answer, you might do something like:
app.controller("MyController", function(PusherService) {
  PusherService.then(function(pusher) {
    pusher.connection.bind("state_change", function(states) {
      console.log("Pusher's state changed from %o to %o", states.previous, states.current);
    });
  });
});
Also note that pusher-js (which angular-pusher uses) has activityTimeout and pongTimeout configuration to tweak the connection state detection.
From my limited experiments, connection states can't be relied on. With the default values, you can go offline for many seconds and then back online without them being any the wiser.
Even if you lower the configuration values, someone could probably drop offline for just a millisecond and miss a message if they're unlucky.
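For reference, those timeouts could presumably be passed through the wrapper's setOptions() from the question's config block (a sketch; the millisecond values are arbitrary):

angular.module('respondersapp', ['doowb.angular-pusher'])
  .config(['PusherServiceProvider', function(PusherServiceProvider) {
    PusherServiceProvider
      .setToken('Foooooooo')
      .setOptions({
        activityTimeout: 10000, // send an activity check after 10s of silence
        pongTimeout: 5000       // treat the connection as dropped if no pong arrives within 5s
      });
  }]);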
