How do you use an AngularJS service to call PouchDB and return the data to the controller? I have been working on an Ionic app with PouchDB for local storage. I have a simple CRUD app built in a controller. Now I want to start moving the PouchDB calls into a service. I haven't been able to get data back from the service. How would I use a service to call PouchDB to get all docs and return it to the controller?
One strategy that I think could work very well for Angular services is this one. It describes a method for keeping an in-memory array synced with the result of PouchDB's allDocs().
Since it's an array that automatically stays synced with PouchDB, you can just do an ng-repeat on it, and you're done. :)
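As a rough illustration of the idea (a sketch only; the database name and scope wiring are assumptions, and the linked strategy wraps this logic up more cleanly):
var db = new PouchDB('yourDB'); // database name is an assumption
$scope.docs = [];
// initial load
db.allDocs({include_docs: true}).then(function (result) {
  $scope.docs = result.rows.map(function (row) { return row.doc; });
  $scope.$applyAsync(); // PouchDB promises resolve outside Angular's digest
});
// keep the array in sync with subsequent changes
db.changes({since: 'now', live: true, include_docs: true})
  .on('change', function (change) {
    var index = $scope.docs.map(function (d) { return d._id; }).indexOf(change.id);
    if (change.deleted) {
      if (index !== -1) { $scope.docs.splice(index, 1); }
    } else if (index === -1) {
      $scope.docs.push(change.doc);
    } else {
      $scope.docs[index] = change.doc;
    }
    $scope.$applyAsync();
  });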
Although your question is a year old, it deserves an answer.
You might want more than one service, i.e. one to use in the controller and another for the backend database storage. For example, in the controller:
(function () {
'use strict';
angular
.module('app.services')
.factory('db',db);
db.$inject = ['$db'];
function db($db) {
var data = {}; // set up a data object to receive document(s)
return {
getDoc: getDoc,
getList: getList,
save: save,
saveBatch: saveBatch
};
// get a single document using the id
function getDoc(id) {
$db.getDoc(id)
.then(
function onSuccess(doc) {
// success so update the view model
angular.extend(data,doc); // use angular.extend to shallow copy object so that it can be returned in full
},
function onError() {
// failure to get document
}
);
return data;
}
// retrieve a group of documents where key is the prefix of the data you want
function getList(key) {
var list = []; // use a local array so the result can be bound directly (e.g. with ng-repeat)
$db.getList(key).then(
function onSuccess(docs) {
// success so update the view model details
angular.forEach(docs.rows, function (value) {
this.push(value.doc);
}, list);
// now you can sort the list or anything else you want to do with it
},
function onError() {
// no data found
}
);
return list;
}
// save a single viewItem
function save(viewItem) {
$db.update(viewItem).then(
function onSuccess() {
// success so update view model if required
},
function onError(e) {
console.log(e); // unable to save
}
);
}
// save an array of viewItems
function saveBatch(viewItems) {
$db.updateBatch(viewItems).then(
function onSuccess() {
// success so update the view model if required
},
function onError(e) {
console.log(e); // unable to save
}
);
}
}
})();
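A controller might then consume this service roughly like so (a sketch; the module name, controller name, and key prefixes are assumptions):
(function () {
  'use strict';
  angular
    .module('app.controllers')
    .controller('ItemsController', ItemsController);
  ItemsController.$inject = ['db'];
  function ItemsController(db) {
    var vm = this;
    // the service returns an object/array that it fills in as the promise resolves,
    // so the view updates once the data arrives
    vm.item = db.getDoc('item:1234');
    vm.items = db.getList('item:'); // bind with ng-repeat="item in vm.items"
    vm.save = function (item) {
      db.save(item);
    };
  }
})();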
For the backend, something like this:
(function () {
'use strict';
angular
.module('app.services')
.factory('$db',$db);
$db.$inject = ['$q'];
function $db($q) {
var db;
return {
setLocalDB: setLocalDB,
update: update,
updateBatch: updateBatch,
getDoc: getDoc,
getAllDocs: getAllDocs,
getList: getList
};
// ------ DATABASE OPENING HANDLER(S) ------
// set to any named database
function setLocalDB(dbName) {
db = new PouchDB(dbName);
return db.info()
.catch(failedCheck); // pass the handler itself; returns a promise that either resolves or rejects
}
// return a rejection for a failure
function failedCheck() {
return $q.reject();
}
// ------ DOCUMENT HANDLING ------
// update document but if errors occur recurse qUpdate until either complete or retries exhausted
function update(doc) {
var counter = 0;
return $q.when(qUpdate(doc,counter));
}
// this routine works for both new and existing documents
function qUpdate(doc,counter) {
return db.put(doc)
.then(function() {
console.log('success - new document');
})
.catch(function(e) {
console.log(e); // not a new document so try as a revision of existing document using _id to find
return db.get(doc._id)
.then(function(origDoc) {
doc._rev = origDoc._rev; // get document revision _rev
return db.put(doc,doc._id,doc._rev)
.then(function() {
console.log('success - revision of document');
})
.catch(function(e){
console.log(e); // log error for failure
});
})
.catch(function(e){
console.log(e); // log error before we take any other action
counter++; // increment counter so we can limit retries (5 by default)
if (counter < 5) {
switch (e.status) {
case 404:
delete doc._rev; // remove revision information so we can see if this works
return qUpdate(doc,counter); // might be deleted so return revised document for retry
case 409:
return qUpdate(doc,counter); // in conflict so try again
default:
try {
throw new Error("cannot save: " + doc._id); // cannot go any further so throw new error
} catch(err) {
console.log(err); // log error for failure
}
}
} else {
try {
throw new Error("cannot save" + doc._id); // cannot go any further so throw new error
} catch(err) {
console.log(err); // log error for failure
}
}
});
});
}
// update a document batch stored in an array
function updateBatch(docs) {
return $q.when(qUpdateBatch(docs));
}
// do the actual update of a batch
function qUpdateBatch(docs) {
return db.bulkDocs(docs).then(function(res) { // return the promise so $q.when can track completion
for (var i=0; i < res.length; i++) {
if (res[i].status === 409) {
update(docs[i]); // in conflict so try this document separately
}
}
}).catch(function(e){
console.log(e); // log error
});
}
// get the document as an angular promise and deal with it in host routine
function getDoc(id) {
return $q.when(db.get(id));
}
// get all documents
function getAllDocs() {
return $q.when(db.allDocs({include_docs: true, attachments: false}));
}
// get a batch of documents between a start and end key
function getList(key) {
return $q.when(db.allDocs({startkey: key, endkey: key + '\uffff', include_docs: true, attachments: false}));
}
}
})();
In your main controller you would want to set the database:
$db.setLocalDB('yourDB');
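Since setLocalDB returns a promise (it wraps db.info()), you might wait for it to resolve before issuing any queries — a small sketch (the controller wiring and key prefix are assumptions):
$db.setLocalDB('yourDB').then(function () {
  // the database is open and responding, so it is safe to start querying
  vm.items = db.getList('item:');
}, function () {
  // could not open or reach the local database
  console.log('failed to open local database');
});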
Hope this is what you were looking for?
In my own data services module I have other functions for remote database, event listeners, remove, sync, compact, destroy and so on.
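For example, a sync function in the same style might look roughly like this (a sketch, not the author's actual code; remoteUrl is whatever CouchDB-compatible endpoint you point it at):
// keep the local database in continuous sync with a remote CouchDB-compatible server
function syncRemote(remoteUrl) {
  return db.sync(remoteUrl, {live: true, retry: true})
    .on('change', function (info) {
      console.log('sync change', info);
    })
    .on('error', function (err) {
      console.log('sync error', err);
    });
}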
I am using a quick action on the Case object to close the case with the help of an Aura component.
In the Aura component I am using the code below to get the work items related to the agent and close them:
omniAPI.getAgentWorks().then(function(result) {
var works = JSON.parse(result.works);
console.log('Works : ',works);
//how to get current case's work ID
var work = works[0];
console.log('Works[0] : ',work);
// Update and add condition
//add if condition
omniAPI.closeAgentWork({workId: work.workId}).then(function(res) {
if (res) {
console.log("Closed work successfully");
var workspaceAPI = cmp.find("workspace");
workspaceAPI.getFocusedTabInfo().then(function(response) {
var focusedTabId = response.tabId;
workspaceAPI.closeTab({tabId: focusedTabId});
})
.catch(function(error) {
console.log(error);
});
} else {
console.log("Close work failed");
}
}).catch(function(error) {
console.log(error);
});
});
Here getAgentWorks() retrieves all the work items related to the agent. How do I figure out the current case's work ID, so that I can close only that work item using omniAPI.closeAgentWork({workId: work.workId})?
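One way to narrow it down (a sketch, not verified against your org: it assumes the component implements force:hasRecordId and that each entry in works exposes a workItemId field matching the case record Id — check your console.log of works to confirm the field names):
var recordId = cmp.get("v.recordId"); // the case the quick action was launched from
omniAPI.getAgentWorks().then(function(result) {
  var works = JSON.parse(result.works);
  // keep only the work item that belongs to the current case
  var work = works.find(function(w) {
    return w.workItemId === recordId;
  });
  if (work) {
    omniAPI.closeAgentWork({workId: work.workId});
  } else {
    console.log("No open work item found for this case");
  }
});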
I am using Meteor/React to learn the Facebook Graph API.
I want to access a user's posts on their Facebook timeline and display them on screen. How can that be done?
With the guidance of the solution provided in How to perform common FB actions using Meteor?, I have tried the following code: server.js
Meteor.methods({
'seePost' : function(){
var graph=Npm.require('fbgraph');
if(Meteor.user().services.facebook.accessToken){
graph.setAccessToken(Meteor.user().services.facebook.accessToken);
var future = new Future();
var onComplete = future.resolver();
graph.get('/me/feed',function(err,result) {
console.log(result);
return onComplete(err,result);
})
Future.wait(future);
}
else{
return false;
}
}
});
client side code :
Meteor.call("seePost", function(err,result) {
if(err) console.log("error" , err);
else console.log("RES", result);
});
I expect the result to be displayed in the client-side console, since I want to show users the posts on their timeline, but I get the following output:
RES, undefined
You can do it using await and Meteor.callAsync.
Basically, the client code waits for the call to complete and gives you the returned data:
const result = await Meteor.callAsync("seePost");
Errors should be handled with a try..catch block
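For example (a minimal sketch of the client side; the surrounding async function is assumed):
// inside an async function on the client
try {
  const result = await Meteor.callAsync("seePost");
  console.log("RES", result);
} catch (err) {
  console.log("error", err);
}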
If you use fibers/future, you need to return something via the future.
const future = new Future();
// some code getting result or something
future.return(something);
return future.wait();
This will return something to the callback of the client call.
Try this code; when you're using Fibers you need to "wait" for the response:
var Future = Npm.require('fibers/future'); // make Future available to the method below
Meteor.methods({
'seePost': function () {
var graph = Npm.require('fbgraph');
if (Meteor.user().services.facebook.accessToken) {
graph.setAccessToken(Meteor.user().services.facebook.accessToken);
var future = new Future();
var onComplete = future.resolver();
graph.get('/me/feed', function (err, result) {
console.log(result);
if (err) {
return future.return(false);
} else {
return future.return(result);
}
})
return future.wait();
}
return false;
}
});
I'm looking to create an array of functions to call dynamically, which will be later used in the Q.all([]) promise call.
For example;
//data is previously generated
var promiseArray = [];
for (var i = 0; i < data.length; i++){
promiseArray.push(functionCall(data[i]))
}
Q.all(promiseArray).then(function(){
//Do something
})
How would I push to the array without calling the function until the Q.all statement? I don't want to call it in the for loop as it will not catch any errors and I can't process the response further.
EDIT:
So to clarify my problem (as I don't think I was as clear as I should have been), here is a solution for a static data length of, say, 3:
//data is previously generated
var data = [12432432,4324322392,433324323];
//Each function call can happen in parallel or in series, as it's an external POST to an API
//I'm not bothered about speed for this application (as it's low throughput) and can wait a few seconds for each response
//functionCall returns a promise
functionCall(data[0]).then(function(){
//Log success / failure to mongo
});
functionCall(data[1]).then(function(){
//Log success / failure to mongo
});
functionCall(data[2]).then(function(){
//Log success / failure to mongo
});
//OR
functionCall(data[0]).then(function(){
//Log success/failure to mongo
functionCall(data[1]).then(function(){
//Log success/failure to mongo
functionCall(data[2]).then(function(){
//Log success/failure to mongo
});
});
});
But I won't know the length of data until runtime.
If I understand correctly, you want to call functionCall for an array of items and have Q.all resolve once all the promises returned by functionCall have completed, regardless of whether they resolve or reject. If you don't care about the results (as you don't seem to in your code), simply handle the rejection in the promise you push, i.e.
var promiseArray = [];
for (var i = 0; i < data.length; i++) {
promiseArray.push(functionCall(data[i]).then(function(result) {
// log success
return logToMongoFunction(result);
}, function(error) {
// log failure
return logToMongoFunction(error);
}).catch(function(error) {
// catch and ignore any error thrown in either logToMongoFunction above
return;
}));
}
Q.all(promiseArray).then(function () {
//Do something
});
Note: the above can be simplified to
Q.all(data.map(function (item) {
return functionCall(item).then(function(result) {
// log success
return logToMongoFunction(result);
}, function(error) {
// log failure
return logToMongoFunction(error);
}).catch(function(error) {
// catch and ignore any error thrown in either logToMongoFunction above
return;
});
})).then(function() {
//Do something
});
The edited question suggests you can also perform the actions in series; in series it would be
data.reduce(function(promise, item) {
return promise.then(function() {
return functionCall(item).then(function(result) {
// log success
return logToMongoFunction(result);
}, function(error) {
// log failure
return logToMongoFunction(error);
}).catch(function(error) {
// catch and ignore any error thrown in either logToMongoFunction above
return;
});
});
}, Promise.resolve()).then(function() {
// all done
});
Instead of Promise.resolve() you could use whatever Q provides as an equivalent for creating an already-resolved promise.
logToMongoFunction would log to Mongo and needs to return a promise if you need to wait for that to finish before processing the next data item. If you do not need to wait for the Mongo logging to complete, then there's no need for that function to return a promise.
I recommend using Promise.mapSeries or the async library for this, because it makes errors very easy to catch. One more thing: looping with a plain for loop doesn't seem to be a good approach if you have database calls in the callback, because it can flood the database with calls, and Node.js can run into memory issues or be unable to serve any other request while it is busy with the requests from the loop. So it's always good to run the loop serially, or to limit the number of parallel executions at a time.
Please see the example below.
This will run through the array serially, one item at a time; when the first one completes, the next will be called:
async.eachOfSeries(data, function(dataInstance, key, next) {
functionCall(dataInstance).then(function(){
next();
}).catch(function(err){
next(err);
})
}, function() {
//iteration completed
});
OR, if functionCall takes a node-style callback instead of returning a promise:
async.eachOfSeries(data, function(dataInstance, key, next) {
functionCall(dataInstance, function(err , result){
if(err)
{
console.log(err);
next(err);
}
else
next();
});
}, function() {
//iteration completed
});
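To limit the number of parallel executions rather than running strictly one at a time, the async library's eachOfLimit can be used in the same way (a sketch; the limit of 3 is arbitrary):
// at most 3 functionCall invocations will be in flight at any time
async.eachOfLimit(data, 3, function(dataInstance, key, next) {
  functionCall(dataInstance).then(function() {
    next();
  }).catch(function(err) {
    next(err);
  });
}, function(err) {
  //iteration completed (err is set if any call failed)
});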
I've configured all users to be created with an empty favorites array: user.favorites: []
Since the users collection is treated differently, how should I publish, subscribe, and access subscribed favorites data in angular-meteor?
Here's what I have so far:
// Meteor.methods ==========================================
addFavorite: function(attendeeId){
var loggedInUser = Meteor.user();
if( !loggedInUser ){
throw new Meteor.Error("must be logged in");
}
loggedInUser.favorites.push(attendeeId);
loggedInUser.username = loggedInUser.username+"x";
console.log(loggedInUser.favorites);
}
// controller ========================================
$scope.addFavorite = function(attendeeId){
$meteor.call("addFavorite", attendeeId);
}
// server =======================================================
Meteor.publish('myFavorites', function(){
if(!this.userId) return null;
return Meteor.users.find(this.userId);
});
Meteor.users.allow({
insert: function(userId, doc){
return true;
},
update: function(useId, doc, fieldNames, modifier){
return true;
},
remove: function(userId, doc){
return true;
}
});
User.favorites is empty. When addFavorite is called, it logs an array with a single userId, but MongoDB isn't updated at all. It looks as if Meteor.user() isn't reactively updating. Does anyone know what I'm doing wrong? Thank you!
EDIT
Latest iteration of the code. Favorites are passed into $scope.favorites, but it isn't reactive. How do I fix this? Thanks!
// publish
Meteor.publish('myFavorites', function(){
if(this.userId){
return Meteor.users.find(this.userId, {
fields: {
favorites: 1
}
});
}else{
this.ready();
}
});
// subscribe
$meteor.subscribe('myFavorites')
.then(function(subscriptionHandle)
{
var user = $meteor.collection(function(){
return Meteor.users.find({_id: Meteor.userId()});
});
$scope.favorites = user[0].favorites;
});
tl;dr
The accounts (Meteor.users) collection is reactive, but by default only the username, emails, and profile fields are published. The quickest fix is to attach the favorites as a new field on the User.profile object.
// Meteor.methods ==========================================
addFavorite: function(attendeeId){
var loggedInUser = Meteor.user();
if( !loggedInUser ){
throw new Meteor.Error("must be logged in");
}
if (loggedInUser.profile.favorites){
loggedInUser.profile.favorites.push(attendeeId);
}
else {
loggedInUser.profile.favorites = [];
loggedInUser.profile.favorites.push(attendeeId);
}
loggedInUser.username = loggedInUser.username+"x";
// persist the favorites; mutating the object returned by Meteor.user() does not by itself write to the collection
Meteor.users.update(loggedInUser._id, {$set: {'profile.favorites': loggedInUser.profile.favorites}});
console.log(loggedInUser.profile.favorites);
}
Right now you probably are writing to the user, which you can verify by running meteor mongo and then db.users.find().pretty(), but the subscription does not publish your favorites field.
Alternative approach
Alternatively, you can publish the favorites field
// Server code --------
Meteor.publish("userData", function () {
if (this.userId) {
return Meteor.users.find({_id: this.userId},
{fields: {'favorites': 1}});
} else {
this.ready();
}
});
Opinionated Meteor.users philosophy
I like to structure my users object around 3 properties:
User.profile --> published to the client, and directly modifiable by the client through client-side code
User.public --> published to the client, but not modifiable except through server-side Meteor methods
User.private --> not published to the client (i.e. only accessible to read on server code), and only modifiable by server code (with client simulation)
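Under that convention, a user document might be shaped roughly like this (a sketch; the field contents are assumptions):
{
  _id: "aBcD1234",
  username: "jane",
  emails: [{ address: "jane@example.com", verified: true }],
  profile: { favorites: ["attendeeId1", "attendeeId2"] }, // client-writable
  public: { checkedIn: true },                            // readable on the client, changed only via methods
  private: { notes: "server-only data" }                  // never published
}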
Just make sure that when you remove the insecure and autopublish packages you double-check your collections' security by using the Meteor.users.allow() function in your server code.
Run meteor list if you want to verify whether or not the insecure and autopublish packages are being used in your current project. (Note: by default, Meteor installs them when you first create your app.)
// Server code --------
Meteor.publish("userData", function () {
if (this.userId) {
return Meteor.users.find({_id: this.userId},
{fields: {'public': 1}});
} else {
this.ready();
}
});
In my Angular app I want to make changes to several locations in my Firebase with a mix of transactions and set. I have written a promise chain with a little help. Now I need to handle any errors that may occur.
In the event of an error on any of the promises, I would want to roll back any changes made in Firebase (the successful promises) and alert the user to the failure.
Current code below
$scope.addNewPost = function() {
var refPosts = new Firebase(FBURL).child('/posts').push();
// Get tags into array for incrementing counters
var tags = $scope.post.tags.split(', ');
var allPromises = [];
// Iterate through tags and set promises for transactions to increment tag count
angular.forEach(tags, function(value, index){
var dfd = $q.defer();
var refTag = new Firebase(FBURL).child('/tags/' + value);
refTag.transaction( function (current_value) {
return current_value + 1;
}, function(error, committed, snapshot) {
if (committed) {
dfd.resolve( snapshot );
} else {
dfd.reject( error );
}
});
allPromises.push( dfd.promise );
});
// Add promise for setting the post data
var dfd = $q.defer();
refPosts.set( $scope.post, function (error) {
if (error) {
dfd.reject(error);
} else {
dfd.resolve('post recorded');
}
});
allPromises.push( dfd.promise );
$q.all( allPromises ).then(
function () {
$scope.reset(); // or redirect to post
},
function (error) {
// error handling goes here how would I
// roll back any data written to firebase
alert('Error: something went wrong your post has not been created.');
}
);
};
So what I need to know is: how do I roll back any changes that happen to my Firebase data in the event that one of these promises fails? There could be any number of updates happening in Firebase (for example: 3 tags being incremented via transaction and the post data being set).
How would I write the failure function to calculate what was successful and undo it? Is this even possible?
--------------- sub question from original post has been solved ---------------
Also, how do you force errors? I've tried setting a variable like below, but it doesn't seem to work. Is there something wrong with my .then?
refPosts.set( $scope.post, function (error) {
var forceError = true;
if (forceError) {
dfd.reject(forceError);
} else {
dfd.resolve('post recorded');
}
allPromises.push( dfd.promise );
});
There are two instances of this line, and they are both in the wrong place:
allPromises.push( dfd.promise );
In the first block, it should be the last statement in the forEach callback, not inside the transaction callback.
In the second block, it should be after the call to set(), not in the callback.
The way your code is written now, $q.all() is getting an empty array of promises. That could also be what's interfering with the forceError test you're attempting.
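In other words, the corrected structure would look roughly like this (a sketch based on the code in the question):
// Iterate through tags and set promises for transactions to increment tag count
angular.forEach(tags, function(value, index){
  var dfd = $q.defer();
  var refTag = new Firebase(FBURL).child('/tags/' + value);
  refTag.transaction( function (current_value) {
    return current_value + 1;
  }, function(error, committed, snapshot) {
    if (committed) {
      dfd.resolve( snapshot );
    } else {
      dfd.reject( error );
    }
  });
  allPromises.push( dfd.promise ); // last statement of the forEach callback
});
// Add promise for setting the post data
var dfd = $q.defer();
refPosts.set( $scope.post, function (error) {
  if (error) {
    dfd.reject(error);
  } else {
    dfd.resolve('post recorded');
  }
});
allPromises.push( dfd.promise ); // after the call to set(), not inside its callback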