I am using bramski/angular-indexedDB in my application. Basic CRUD operations work fine, but custom queries are not working as expected.
I am using the following code:
angular.module('myModuleName', ['indexedDB'])
  .config(function ($indexedDBProvider) {
    $indexedDBProvider
      .connection('myIndexedDB')
      .upgradeDatabase(1, function (event, db, tx) {
        var objStore = db.createObjectStore('people', {keyPath: 'ssn'});
        objStore.createIndex('name_idx', 'name', {unique: false});
        objStore.createIndex('name_idx, age_idx', ['name', 'age'], {unique: false});
      });
  });
Basic query operations work as follows:
$indexedDB.openStore('people', function (x) {
  var find = x.query();
  find = find.$eq('John');
  find = find.$index('name_idx');
  x.eachWhere(find).then(function (e) {
    $scope.list = e;
  });
});
which is equivalent to the following SQL query:
select * from people where name='John'
But in the above scenario, how can we execute custom queries like the following?
select * from people where name='John' and age='25';
delete from people where name='John' and age='25';
The library you are using doesn't support complex queries; however, you can write a pure-JS solution for it, similar to this:
First you need to define your index as:
objStore.createIndex('name_age_idx', ['name', 'age'] , {unique: false});
Then you can run a search query that returns only those records matching both values:
var searchIndexedDB = function (name, age, callback) {
  var request = indexedDB.open(dbName);
  request.onsuccess = function (e) {
    var db = e.target.result;
    var trans = db.transaction(objectStoreName, 'readonly');
    var store = trans.objectStore(objectStoreName);
    var index = store.index('name_age_idx');
    var keyRange = IDBKeyRange.only([name, age]);
    // open a cursor on the index for all objects with the same name and age
    var openCursorRequest = index.openCursor(keyRange);
    openCursorRequest.onsuccess = function (e) {
      var result = e.target.result;
      // first check whether a value was found
      if (result) {
        callback(result.value); // your callback will be called once per object
        // result.delete() - to delete the object under the cursor
        result.continue(); // continue iterating - triggers the next cursor request
      }
    };
    trans.oncomplete = function (e) {
      db.close();
    };
    openCursorRequest.onerror = function (e) {
      console.log('Error getting: ', e);
    };
  };
  request.onerror = function (e) {
    console.error('Error opening database: ', e);
  };
};
If you need a range from one age to another, all you need to do is change the key range to:
var keyRange = IDBKeyRange.bound([name, fromAge], [name, toAge]);
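The delete query from the question can be handled the same way: open a cursor over the compound index inside a readwrite transaction and delete each match. A minimal sketch under the same assumptions as above (dbName and objectStoreName defined elsewhere; deleteWhereNameAndAge is a hypothetical helper name):
var deleteWhereNameAndAge = function (name, age, done) {
  var request = indexedDB.open(dbName);
  request.onsuccess = function (e) {
    var db = e.target.result;
    // readwrite is required for deletes
    var trans = db.transaction(objectStoreName, 'readwrite');
    var index = trans.objectStore(objectStoreName).index('name_age_idx');
    index.openCursor(IDBKeyRange.only([name, age])).onsuccess = function (e) {
      var cursor = e.target.result;
      if (cursor) {
        cursor.delete();   // delete the record under the cursor
        cursor.continue(); // move on to the next match
      }
    };
    trans.oncomplete = function () {
      db.close();
      if (done) done(); // all deletes have been committed
    };
  };
};
// Usage, equivalent to: delete from people where name='John' and age='25'
// deleteWhereNameAndAge('John', '25', function () { console.log('done'); });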
(I was not sure what to have as a title, so if you have a better suggestion, feel free to come up with one - I will correct it.)
I am working on an Angular application where I have some menus and a search result list. I also have a document view area.
You can sort of say that the application behaves like an e-mail application.
I have a few controllers:
DateCtrl: creates a list of dates so the users can choose which dates they want to see posts from.
SourceCtrl: Creates a list of sources so the user can choose from which sources he/she wants to see posts from.
ListCtrl: The controller populating the list. The data comes from an elastic search index. The list is updated every 10-30 seconds (trying to find the best interval) by using the $interval service.
What I have tried
Sources: I have tried to make this a filter, but when a user clicks two checkboxes, the list is not sorted by date but by which checkbox the user clicked first.
If it is possible to make this work as a filter, I'd rather continue doing that.
The current code is like this; it does not do what I want:
.filter("bureauFilter", function(filterService) {
return function(input) {
var selectedFilter = filterService.getFilters();
if (selectedFilter.length === 0) {
return input;
}
var out = [];
if (selectedFilter) {
for (var f = 0; f < selectedFilter.length; f++) {
for (var i = 0; i < input.length; i++) {
var myDate = input[i]._source.versioncreated;
var changedDate = dateFromString(myDate);
input[i]._source.sort = new Date(changedDate).getTime();
if (input[i]._source.copyrightholder === selectedFilter[f]) {
out.push(input[i]);
}
}
}
// return out;
// we need to sort the out array
var returnArray = out.sort(function(a,b) {
return new Date(b.versioncreated).getTime() - new Date(a.versioncreated).getTime();
});
return returnArray;
} else {
return input;
}
}
})
Date: I have found in production that this cannot be used as a filter. The list of posts shows the latest 1000 posts, which is only a third of all posts arriving each day. So this has to be changed to a date search.
I am trying something like this:
.service('elasticService', ['es', 'searchService', function (es, searchService) {
var esSearch = function (searchService) {
if (searchService.field === "versioncreated") {
// doing some code
} else {
// doing some other type of search
}
};
}])
and a search service:
.service('searchService', function () {
var selectedField = "";
var selectedValue = "";
var setFieldAndValue = function (field, value) {
selectedField = field;
selectedValue = value;
};
var getFieldAndValue = function () {
return {
"field": selectedField,
"value": selectedValue
}
};
return {
setFieldAndValue: setFieldAndValue,
getFieldAndValue: getFieldAndValue
};
})
What I want to achieve is this:
When no dates or sources are clicked the whole list shall be shown.
When Source or Date are clicked it shall get the posts based on these selections.
I cannot use a filter on Date, as the application receives some 3000 posts a day, so I have to query Elasticsearch to get the posts for the selected date.
Up until now I have put the Elasticsearch query in the listController, but I am now refactoring so the ES search happens in a service. This is so the listController will receive the correct posts based on the selections the user has made.
Question is: What is the best pattern or method to use when trying to achieve this?
Where your data comes from is pretty irrelevant; it's up to you to hook it up with your data source.
With regards to how to render a list:
The view would be:
<div ng-controller='MyController as myCtrl'>
  <form>
    <input name='searchText' ng-model='myCtrl.searchText'>
  </form>
  <ul>
    <li ng-repeat='item in myCtrl.list | filter:myCtrl.searchText' ng-bind='item'></li>
  </ul>
  <button ng-click='myCtrl.doSomethingOnClick()'>Search</button>
</div>
controller would be:
myApp.controller('MyController', ['ElasticSearchService',function(ElasticSearchService) {
var self = this;
self.searchText = '';
ElasticSearchService.getInitialList().then(function(list) {
self.list = list;
});
self.doSomethingOnClick = function() {
ElasticSearchService.updateList(self.searchText).then(function(list) {
self.list = list;
});
}
}]);
service would be:
myApp.service('ElasticSearchService', ['$q', function($q) {
  var obj = {};
  obj.getInitialList = function() {
    var defer = $q.defer();
    // do some elastic search stuff here
    // on success: defer.resolve(esdata);
    // on failure: defer.reject();
    return defer.promise; // note: promise is a property, not a function
  };
  obj.updateList = function(param) {
    var defer = $q.defer();
    // do some elastic search stuff here
    // on success: defer.resolve(esdata);
    // on failure: defer.reject();
    return defer.promise;
  };
  return obj;
}]);
This code has NOT been tested but gives you an outline of how you should approach this. $q is used because promises allow things to be dealt with asynchronously.
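As a variation on the outline above, note that $http itself already returns a promise, so the manual deferred can be skipped entirely. A minimal sketch, assuming a hypothetical /api/search endpoint that proxies the Elasticsearch query and takes the selected date/source filters as query parameters:
myApp.service('ElasticSearchService', ['$http', function($http) {
  var obj = {};
  obj.updateList = function(params) {
    // params might look like {date: '2015-06-01', sources: ['AP']}
    return $http.get('/api/search', {params: params})
      .then(function(response) {
        return response.data; // resolve with just the hits
      });
  };
  obj.getInitialList = function() {
    return obj.updateList({}); // no filters: return everything
  };
  return obj;
}]);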
This problem has me stumped.
For some reason, the auto-incrementing key generator in IndexedDB resets after performing an update on an existing object with a put transaction, leading to overwrites of data in the database.
For my app, I'm using a self-written IndexedDB service for AngularJS with all the basic CRUD functions implemented.
I may also add that I'm developing with Ionic Framework, even though I doubt that is to blame.
Considering the service is a work-in-progress, I've let the key path for an object store default to "id" with an autoincrementing strategy.
The indices for the given store, nevertheless, are up to the user to decide in a specific object.
As an example:
dbHelper.objectStores = [{name: 'employees',
indices: [{indexName: 'name', isUnique: false},
{indexName: 'phone', isUnique: true}]}];
This would, unless already created in the db, create the object store 'employees' with indices 'name' and 'phone', where 'phone' would have to be a unique value while 'name' would not.
Here is the implementation of the openDB function.
Please note that dbHelper.objectStores is supposed to be empty, as it's up to the user to assign these properties before opening the db (or else it is defaulted).
angular.module('dbProvider', [])
.factory('$db', ['$window', function($window) {
// DB Object
var dbHelper = {};
dbHelper.indexedDB = {}; // holder for the open db handle, used by the CRUD functions below
// Properties - given defaults unless assigned manually by the user before openDB is invoked.
dbHelper.dbName = 'defaultDB';
dbHelper.dbVersion = 1;
dbHelper.objectStores = [];
dbHelper.openDB = function(onCompleteCallback, onErrorCallback) {
console.log('Attempting to open db with name ' + dbHelper.dbName + '.');
var request = $window.indexedDB.open(dbHelper.dbName, dbHelper.dbVersion);
// Invoked by indexedDB if version changes
request.onupgradeneeded = function(e) {
console.log('Version change. Current version: ' + dbHelper.dbVersion);
var db = e.target.result;
e.target.transaction.onerror = onErrorCallback;
if(dbHelper.objectStores.length === 0) {
dbHelper.objectStores.push({name:'defaultStore', indices: []});
}
for(var store in dbHelper.objectStores) {
if(db.objectStoreNames.contains(dbHelper.objectStores[store].name)) {
console.log(dbHelper.objectStores[store].name + ' deleted.');
db.deleteObjectStore(dbHelper.objectStores[store].name);
}
var newStore = db.createObjectStore(dbHelper.objectStores[store].name, {keyPath: "id", autoIncrement: true});
for(var index in dbHelper.objectStores[store].indices) {
newStore.createIndex(dbHelper.objectStores[store].indices[index].indexName,
dbHelper.objectStores[store].indices[index].indexName,
{unique : dbHelper.objectStores[store].indices[index].isUnique});
}
console.log(dbHelper.objectStores[store].name + ' created.');
}
};
request.onsuccess = function(e) {
console.log('DB ' + dbHelper.dbName + ' open.');
dbHelper.indexedDB.db = e.target.result;
onCompleteCallback();
};
request.onerror = onErrorCallback;
};
Here are some of the CRUD functions (the ones in question):
dbHelper.findItemWithIndex = function(keyValue, storename,
onCompleteCallback,onErrorCallback) {
var db = dbHelper.indexedDB.db;
var trans = db.transaction([storename], "readwrite");
var store = trans.objectStore(storename);
var index = store.index(keyValue.key);
index.get(keyValue.value).onsuccess = function(event) {
onCompleteCallback(event.target.result);
};
};
dbHelper.addItemToStore = function(item, storename,
onCompleteCallback, onErrorCallback) {
var db = dbHelper.indexedDB.db;
var trans = db.transaction([storename], "readwrite");
var store = trans.objectStore(storename);
var request = store.add(item);
trans.oncomplete = onCompleteCallback;
request.onerror = onErrorCallback;
};
dbHelper.deleteItemFromStore = function(itemId, storename,
onCompleteCallback, onErrorCallback) {
var db = dbHelper.indexedDB.db;
var trans = db.transaction([storename], "readwrite");
var store = trans.objectStore(storename);
var request = store.delete(itemId);
trans.oncomplete = onCompleteCallback;
request.onerror = onErrorCallback;
};
dbHelper.updateItem = function(item, storename, onCompleteCallback, onErrorCallback) {
var db = dbHelper.indexedDB.db;
var trans = db.transaction([storename], "readwrite");
var store = trans.objectStore(storename);
var request = store.put(item);
trans.oncomplete = onCompleteCallback;
request.onerror = onErrorCallback;
};
Finally, the code from the controller where the transactions are invoked.
The strategy here is that the item is added to the db using the addItemToStore function the first time it is persisted, and afterwards updated with the updateItem function.
After adding the first time, the object is immediately fetched in order to keep working on it with the assigned id from the db.
$scope.updateTemplate = function() {
console.log('Saving..');
var onCompleteCallback = {};
if(!$scope.formTemplate.firstSave) {
onCompleteCallback = $scope.updateModel;
} else {
$scope.formTemplate.firstSave = false;
onCompleteCallback = $scope.setId;
}
$db.updateItem($scope.formTemplate, $scope.objectStore.name,
onCompleteCallback, $scope.dbError);
};
$scope.newItem = function() {
$db.addItemToStore($scope.formTemplate, $scope.objectStore.name,
$scope.setId, $scope.dbError);
};
$scope.setId = function() {
$db.findItemWithIndex(
{key: 'title',
value: $scope.formTemplate.title},
$scope.objectStore.name,
function(result) {
console.log(JSON.stringify(result));
$scope.formTemplate = result;
},
function(error) {
$scope.dbError(error);
});
}
It's here everything goes to hell.
I add an object, go back to another view and find it in the list with id=1.
I add another object, go back to the list view, and there it is with id=2.
And so forth and so forth..
Then, after updating either of the objects with the $scope.updateTemplate function, which also works like a charm, things get interesting:
The next object added gets id=1 and totally erases good old numero uno from earlier.
The next objects also get id's that cause them to replace the already existing objects.
What could cause this?
For testing I'm using Safari 8 on OS X 10.10, and I'm deploying to an LG G2 with KitKat 4.4.2.
To be honest, I skimmed, but I saw this: "Safari 8" - the latest iOS and Safari have serious bugs with IndexedDB: http://www.raymondcamden.com/2014/9/25/IndexedDB-on-iOS-8--Broken-Bad
In iOS 9, many of the IndexedDB bugs are fixed, but not all. We are currently testing on iOS 9 Beta 2, and this particular bug that you found is not fixed.
We were able to work around this problem by not using autoincrement on our object stores. We just manually find the max key value and increment that.
Inserting an object looks something like this:
var store = db.transaction([entity], "readwrite").objectStore(entity);
store.openCursor(null, "prev").onsuccess = function (event) {
    var cursor = event.target.result;
    // cursor is null when the store is empty, so fall back to 0
    var maxKey = cursor ? cursor.key : 0;
    object.id = maxKey + 1;
    store.add(object);
};
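Wrapped up as a helper, the workaround might look like the following sketch (addWithManualKey is a hypothetical name; the cursor read and the add run in the same transaction, so no other write can slip in between them):
function addWithManualKey(db, storeName, object, done) {
    var store = db.transaction([storeName], "readwrite").objectStore(storeName);
    // "prev" iterates from the highest key down, so the first result
    // carries the current maximum key (or is null for an empty store).
    store.openCursor(null, "prev").onsuccess = function (event) {
        var cursor = event.target.result;
        object.id = (cursor ? cursor.key : 0) + 1;
        store.add(object).onsuccess = function () {
            if (done) done(object.id); // report the key we assigned
        };
    };
}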
I am using Ext.data.Store's each(). But when the store is filtered, this method only loops over the filtered records. Is there any other method or workaround to loop over all the records of a store, even when a filter is applied to the store?
var attStore = Ext.getStore("myStore");
var allRecords = attStore.snapshot || attStore.data;
allRecords.each(function (record) {
if (record.data.IsUpdated) {
record.set('updatedByUser', true);
}
else {
record.set('updatedByUser', false);
}
record.commit();
});
The line var allRecords = attStore.snapshot || attStore.data; actually returns all the records as intended, but when I try to update a record (or one of the properties in that record, using record.data.property = something), that record is not getting updated.
Thanks
Use this:
var allRecords = store.snapshot || store.data;
and loop like this:
allRecords.each(function(record) {
console.log(record);
});
See the store's snapshot property in the docs.
On Sencha Touch 2.3 I needed to do the following to bypass the filter:
var allRecords = store.queryBy(function(){return true;});
allRecords.each(function(r){
doStuff();
});
Starting from ExtJS 5, use the following:
Ext.data.Store.each( fn, [scope], [includeOptions] )
For example:
store.each(function(record) {
// ...
}, scope, {filtered: true});
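Applied to the update loop from the question, that could look like this sketch (assuming ExtJS 5+, where the {filtered: true} option makes each() visit records hidden by the current filter):
var attStore = Ext.getStore("myStore");
attStore.each(function (record) {
    // same logic as the original loop, condensed
    record.set('updatedByUser', !!record.data.IsUpdated);
    record.commit();
}, this, {filtered: true});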
// Here's how you can do that ...
myStore.each(function(record)
{
record.fields.each(function(field)
{
var fieldValue = record.get(field.name);
});
// Alternatively...
/*
for (var rd in record.data)
{
var fName = rd;
var fValue = record.data[rd];
}
*/
}, this);
You can use the getStore().getDataSource().each(function (r) {}); function to get all the data, even when the store is filtered.
So I am trying to store product types from a JSON file before adding them to a collection, but I am getting some strange results (as in, I don't fully understand them).
On my router page I set up a variable for cached products as well as product types:
cachedProductTypes: null,
productType : {},
products : {},
getProductTypes:
function(callback)
{
if (this.cachedProductTypes !== null) {
return callback(cachedProductTypes);
}
var self = this;
$.getJSON('data/product.json',
function(data)
{
self.cachedProductTypes = data;
callback(data);
}
);
},
parseResponse : function(data) {
var result = { prodTypes: [], products: [] };
var type;
var types = data.data.productTypeList;
var product;
var i = types.length;
while (type = types[--i]) {
result.prodTypes.push({
id: type.id,
name: type.name,
longName: type.longName
// etc.
});
while (product = type.productList.pop()) {
product.productTypeId = type.id,
result.products.push(product);
}
};
this.productType = result.prodTypes;
console.log( "dan");
this.products = result.products;
},
showProductTypes:function(){
var self = this;
this.getProductTypes(
function(data)
{
self.parseResponse(data);
var productTypesArray = self.productType;
var productList=new ProductsType(productTypesArray);
var productListView=new ProductListView({collection:productList});
productListView.bind('renderCompleted:ProductsType',self.changePage,self);
productListView.update();
}
);
}
When a user goes to the show-product-types page, it runs the showProductTypes function.
So I am passing the product types array to my collection.
On the collection page:
var ProductsType=Backbone.Collection.extend({
model:ProductType,
fetch:function(){
var self=this;
var tmpItem;
//fetch the data using ajax
$.each(this.productTypesArray, function(i,prodType){
tmpItem=new ProductType({id:prodType.id, name:prodType.name, longName:prodType.longName});
console.log(prodType.name);
self.add(tmpItem);
});
self.trigger("fetchCompleted:ProductsType");
}
});
return ProductsType;
Now this doesn't work, as this.productTypesArray is undefined if I console.log it.
(How am I supposed to get this?)
I would have thought I need to go through and add each new ProductType.
The strange bit: if I just have the code
var ProductsType=Backbone.Collection.extend({
model:ProductType,
fetch:function(){
var self=this;
var tmpItem;
//fetch the data using ajax
self.trigger("fetchCompleted:ProductsType");
}
});
return ProductsType;
it actually adds the products to the collection? I guess this means I can just pass an array to the collection and do not have to add each productType?
I guess this means I can just pass an array to the collection and do not have to add each productType?
Yes, you can pass an array to the collection's constructor, and it will create the models for you.
As far as your caching code, it looks like the problem is here:
if (this.cachedProductTypes !== null) {
return callback(cachedProductTypes);
}
The callback statement's argument is missing this - should be return callback(this.cachedProductTypes).
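To make the first point concrete, here is a minimal sketch using the names from the question: Backbone.Collection's constructor accepts an array of attribute hashes and builds one model per entry, so the custom fetch loop is unnecessary:
var ProductType = Backbone.Model.extend({});
var ProductsType = Backbone.Collection.extend({
    model: ProductType
});
// Passing the parsed array straight to the constructor creates
// one ProductType model per element - no manual add() loop needed.
var productList = new ProductsType([
    {id: 1, name: 'shoes', longName: 'All kinds of shoes'},
    {id: 2, name: 'hats', longName: 'All kinds of hats'}
]);
console.log(productList.length); // 2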
I have a Node.js/Express app that queries a MySQL db within the route and displays the result to the user. My problem is how do I run the queries and block until both queries are done before redirecting the user to the page they requested?
In my example I have 2 queries that need to finish before I render the page. I can get the queries to run sequentially if I nest query 2 inside the 'result' callback of query 1. This, however, will become very convoluted as the number of queries increases.
How do I go about running multiple (in this case 2) database queries synchronously without nesting the subsequent query in the prior query's 'result' callback?
I've looked at the 'Flow control / Async goodies' section in the Node modules list and tried flow-js, but I can't get it to work with the async queries.
Listed below are the 2 queries that I'm attempting to execute from the '/home' route. Can the Node experts explain the 'right' way to do this?
app.get('/home', function (req,res) {
var user_array = [];
var title_array = [];
// first query
var sql = 'select user_name from users';
db.execute(sql)
.addListener('row', function(r) {
user_array.push( { user_name: r.user_name } );
})
.addListener('result', function(r) {
req.session.user_array = user_array;
});
// second query
var sql = 'select title from code_samples';
db.execute(sql)
.addListener('row', function(r) {
title_array.push( { title: r.title } );
})
.addListener('result', function(r) {
req.session.title_array = title_array;
});
// because the queries are async no data is returned to the user
res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
});
The goal with Node is not to care what order things happen in. This can complicate some scenarios. There is no shame in nesting callbacks. Once you are used to how it looks, you may find that you actually prefer that style. I do; it is very clear what order callbacks will fire in. You can forgo the anonymous functions to make it less verbose if you have to.
If you are willing to restructure your code a bit, you can use the "typical" nested callback method. If you want to avoid callbacks, there are numerous async frameworks that will help you do this. One that you might want to check out is async.js (https://github.com/fjakobs/async.js). Here is an example of each:
app.get('/home', function (req,res) {
var result = {};
result.user_array = [];
result.title_array = [];
var finishRequest = function(result) {
req.session.title_array = result.title_array;
req.session.user_array = result.user_array;
res.render('home.ejs', {layout: false, locals: { user_name: result.user_array, title: result.title_array }});
};
// first query
var q1 = function(fn) {
var sql = 'select user_name from users';
db.execute(sql)
.addListener('row', function(r) {
result.user_array.push( { user_name: r.user_name } );
})
.addListener('result', function(r) {
return fn && fn(null, result);
});
};
// second query
var q2 = function(fn) {
var sql = 'select title from code_samples';
db.execute(sql)
.addListener('row', function(r) {
result.title_array.push( { title: r.title } );
})
.addListener('result', function(r) {
return fn && fn(null, result);
});
}
//Standard nested callbacks
q1(function (err, result) {
if (err) { return; /* do something */ }
q2(function (err, result) {
if (err) { return; /* do something */ }
finishRequest(result);
});
});
//Using async.js
async.list([
q1,
q2,
]).call().end(function(err, result) {
finishRequest(result);
});
});
For a one-off, I would probably just use a reference-counting approach. Simply keep track of how many queries you want to execute and render the response when they have all finished.
app.get('/home', function (req,res) {
var lock = 2;
var user_array = [];
var title_array = [];
var finishRequest = function() {
res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
}
// first query
var sql = 'select user_name from users';
db.execute(sql)
.addListener('row', function(r) {
user_array.push( { user_name: r.user_name } );
})
.addListener('result', function(r) {
req.session.user_array = user_array;
lock -= 1;
if (lock === 0) {
finishRequest();
}
});
// second query
var sql = 'select title from code_samples';
db.execute(sql)
.addListener('row', function(r) {
title_array.push( { title: r.title } );
})
.addListener('result', function(r) {
req.session.title_array = title_array;
lock -= 1;
if (lock === 0) {
finishRequest();
}
});
});
An even nicer approach would be to simply call finishRequest() in each 'result' callback and check for non-empty arrays before you render the response. Whether that will work in your case depends on your requirements.
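For illustration, that variant could look like this sketch; it renders on whichever 'result' callback fires last, but only works if both queries are guaranteed to return at least one row (the caveat above):
var finishRequest = function() {
    // only render once both arrays have been populated
    if (user_array.length > 0 && title_array.length > 0) {
        res.render('home.ejs', {layout: false, locals: { user_name: user_array, title: title_array }});
    }
};
// ...then call finishRequest() unconditionally in each query's 'result' callback.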
Here's a really easy trick to handle multiple callbacks.
var after = function _after(count, f) {
var c = 0, results = [];
return function _callback() {
switch (arguments.length) {
case 0: results.push(null); break;
case 1: results.push(arguments[0]); break;
default: results.push(Array.prototype.slice.call(arguments)); break;
}
if (++c === count) {
f.apply(this, results);
}
};
};
Example usage:
var handleDatabase = after(2, function (res1, res2) {
res.render('home.ejs', { locals: { r1: res1, r2: res2 } });
})
db.execute(sql1).on('result', handleDatabase);
db.execute(sql2).on('result', handleDatabase);
So basically you need reference counting. This is the standard approach in these situations. I actually use this small utility function instead of a flow-control library.
If you want a full-blown flow control solution, I would recommend futuresJS.
I find that the async library is the best for things like this. https://github.com/caolan/async#parallel
I can't test this, so forgive me if there are some typos. I refactored your query function to be reusable. Calling queryRows returns a function that matches the format of the async module's parallel task functions. After both queries are complete, it calls the final callback and passes the results of the two queries as an argument, which you can use in your template.
function queryRows(col, table) {
  return function(cb) {
    var rows = [];
    db.execute('SELECT ' + col + ' FROM ' + table)
      .on('row', function(r) {
        rows.push(r);
      })
      .on('result', function() {
        cb(null, rows); // async's task callbacks expect (err, result)
      });
  };
}
app.get('/home', function(req, res) {
  async.parallel({
    users: queryRows('user_name', 'users'),
    titles: queryRows('title', 'code_samples')
  },
  function(err, result) {
    res.render('home.ejs', {
      layout: false,
      locals: {user_name: result.users, title: result.titles}
    });
  });
});
There are some solutions here, but in my opinion the best solution is to make the code synchronous in a very easy way.
You could use the "synchronize" package.
Just
npm install synchronize
Then var sync = require('synchronize');
Put the logic which should be synchronous into a fiber by using:
sync.fiber(function() {
  //put your logic here
});
An example with two MySQL queries:
var express = require('express');
var bodyParser = require('body-parser');
var mysql = require('mysql');
var sync = require('synchronize');
var db = mysql.createConnection({
host : 'localhost',
user : 'user',
password : 'password',
database : 'database'
});
db.connect(function(err) {
if (err) {
console.error('error connecting: ' + err.stack);
return;
}
});
function saveSomething() {
var post = { /* the column values for the new row; the id is auto-generated */ };
//no callback here; the result is in "query"
var query = sync.await(db.query('INSERT INTO mainTable SET ?', post, sync.defer()));
var newId = query.insertId;
post = {foreignKey: newId};
//this query can be async, because it doesn't matter in this case
db.query('INSERT INTO subTable SET ?', post, function(err, result) {
if (err) throw err;
});
}
When "saveSomething()" is called, it inserts a row in a main table and receives the last inserted id. After that the code below will be executed. No need for nesting promises or stuff like that.
Option one: if all your queries are related to each other, create a stored procedure, put all your data logic into it, and make a single db.execute call.
Option two: if your db uses one connection, then commands are guaranteed to be executed serially, and you can use this as an async helper:
db.execute(sql1).on('row', function(r) {
req.session.user_array.push(r.user);
});
db.execute(sql2)
.on('row', function(r) {
req.session.title_array.push(r.title);
})
.on('end', function() {
// render data from req.session
});
You can use fibers to write pseudo-synchronous code with Node.js. Take a look at these DB tests: https://github.com/alexeypetrushin/mongo-lite/blob/master/test/collection.coffee
They are asynchronous but look synchronous; more details: http://alexeypetrushin.github.com/synchronize