Aura Storage keeps data for only a few seconds (it should keep it much longer) - aura-framework

I'm trying to use the Aura storage facility to store some graph data on the client side. To create the storage I use:
function getStorage() {
    var storageName = "GraphsData";
    var storage = $A.storageService.getStorage(storageName);
    if (!storage) {
        console.log("creating storage...");
        storage = $A.storageService.initStorage({
            name: storageName,                  // name
            persistent: true,                   // persistent
            secure: true,                       // secure
            maxSize: 1024 * 1024 * 10,          // maxSize in bytes
            defaultExpiration: 60000,           // defaultExpiration (seconds)
            defaultAutoRefreshInterval: 60000,  // defaultAutoRefreshInterval (seconds)
            debugLoggingEnabled: true,          // debugLoggingEnabled
            clearStorageOnInit: false,          // clearStorageOnInit
            version: "1.0"                      // version
        });
    }
    console.log("storage: " + storage);
    return storage;
}
And it works: I can store data there by executing:
storage.set("graphData", data);
And retrieve data by:
getGraphData : function() {
    var storage = getStorage();
    return storage.get("graphData").then(function(data) {
        if (data) {
            var promise = new Promise(function(resolve, reject) {
                resolve(data);
            });
            return promise;
        } else {
            .....
But after 8-10 seconds, when I try to retrieve the data from storage again, the storage returns undefined (as if the data had been removed). Why so fast? I played with different initialization parameters, but nothing changed.

OK, I found that the documentation is misleading (wrong).
By inspecting the storage component through the debug console I found different property names:
AuraStorage {$adapter$: MemoryAdapter, name: "GraphsData", $maxSize$: 10485760, $expiration$: 10000, $autoRefreshInterval$: 30000…}
$adapter$: MemoryAdapter
$autoRefreshInterval$ : 30000
$debugLogging$ : false
$expiration$ : 10000
$getOperationsInFlight$ : 0
$keyPrefix$ : "1.0:"
$lastSweepTime$ : 1468848393250
$maxSize$ : 10485760
$sweepInterval$ : 300000
$sweepPromise$ : undefined
$sweepingSuspended$ : false
adapter : MemoryAdapter
name : "GraphsData"
version : "1.0"
Note that $expiration$ is 10000, which lines up with the roughly 10 seconds after which my data disappeared; the defaultExpiration value I passed was being ignored. After setting the expiration parameter (instead of defaultExpiration), everything started to work as expected. Now I use
expiration : 600,
for a 10-minute expiration.
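For reference, here is a minimal sketch of the initialization that now behaves as expected. Only the expiration key is confirmed by my testing; the autoRefreshInterval, debugLogging and clearOnInit names are guesses based on the internal properties dumped above:
storage = $A.storageService.initStorage({
    name: storageName,
    persistent: true,
    secure: true,
    maxSize: 1024 * 1024 * 10,   // 10 MB
    expiration: 600,             // seconds -> 10 minutes (confirmed to work)
    autoRefreshInterval: 600,    // assumption, inferred from $autoRefreshInterval$ above
    debugLogging: true,          // assumption, inferred from $debugLogging$ above
    clearOnInit: false,          // assumption
    version: "1.0"
});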

Related

Increase the number of messages read in a timeframe using the Google API

My application needs to read the message ID and subject of all the messages in Gmail using the Google REST API. I understand that I need to make two calls: the first call retrieves all the message IDs, then I retrieve the subjects of those message IDs in batch.
Currently I am able to read 3000 messages in 90 seconds (1 minute and 30 seconds). I want to be able to read more messages in the same timeframe.
Please suggest how this can be done.
Note: I am already using gzip and partial response. Any other suggestions would be helpful.
The Gmail API has a couple of limitations when it comes to usage:
Daily Usage: 1,000,000,000 quota units per day
Per User Rate Limit: 250 quota units per user per second, moving average (allows short bursts)
Listing messages costs 5 quota units, and getting an individual message also costs 5 quota units.
Listing allows us to get 100 message ids in one API call. This gives us 250 / 5 = 50 listing calls per second => 50 * 100 = 5000 message ids per second. Just listing ~3000 messages in quick succession should not make you hit a brick wall, as far as the quota goes.
Ignoring the bursting capabilities of the quota, getting messages can be done 250 / 5 = 50 messages a second. This would in theory allow us to get 3000 messages in 3000 / 50 = 60 seconds, landing at a little bit more than a minute total.
I have never tried something this quota-intensive before, so I wrote a little experiment for fun using Node.js, to test how hard you can burst the quota. I made sure to only ask for partial data, and I used batch requests:
var rp = require('request-promise');
var googleUtils = require('google-api-batch-utils');
var createBatchBody = googleUtils.createBatchBody;
var parseBatchResponse = googleUtils.parseBatchResponse;
var _ = require('lodash');

// Boundary used in the batch request, to reduce the number of http requests
// when getting the subject of the messages.
var BOUNDARY = 'example_boundary';

// If the quota should be exceeded, how long should we wait to try again?
var TIMEOUT = 1000;

// Playground access token authorized with the Gmail scopes:
// https://developers.google.com/oauthplayground/
var ACCESS_TOKEN = '{API_KEY}';

function listAllMessageIds() {
    var resultingIds = [];
    return (function listMessageIds(pageToken) {
        return rp({
            uri: 'https://www.googleapis.com/gmail/v1/users/me/messages',
            qs: {
                access_token: ACCESS_TOKEN,
                pageToken: pageToken,
                fields: 'messages(id),nextPageToken'
            },
            json: true
        }).then(function(response) {
            var messages = response.messages;
            var nextPageToken = response.nextPageToken;
            if (messages) {
                resultingIds = resultingIds.concat(_.pluck(messages, 'id'));
            }
            if (nextPageToken) {
                return listMessageIds(nextPageToken);
            } else {
                return resultingIds;
            }
        });
    })(null);
}

function getSubjectOfAllMessages(messageIds) {
    var resultingSubjectIdObjects = [];
    var uris = messageIds.map(function(id) {
        return {
            uri: '/gmail/v1/users/me/messages/' + id,
            qs: {
                fields: 'id,payload/headers',
                format: 'metadata',
                metadataHeaders: 'subject'
            }
        };
    });
    var idChunks = _.chunk(uris, 100);
    return (function getSubjectOfChunk(chunk) {
        if (!chunk) {
            return resultingSubjectIdObjects;
        }
        var batchBody = createBatchBody(chunk, BOUNDARY);
        return rp({
            method: 'POST',
            uri: 'https://www.googleapis.com/batch',
            headers: {
                Authorization: 'Bearer ' + ACCESS_TOKEN,
                'Content-Type': 'multipart/mixed; boundary="' + BOUNDARY + '"'
            },
            body: batchBody
        })
        .then(parseBatchResponse)
        .then(function(messages) {
            resultingSubjectIdObjects =
                resultingSubjectIdObjects.concat(messages.map(function (m) {
                    return {id: m.id, subject: _.get(m, 'payload.headers[0].value') || ''};
                }));
            return getSubjectOfChunk(idChunks.shift());
        })
        .catch(function(error) {
            return new Promise(function(resolve, reject) {
                setTimeout(function() {
                    resolve(getSubjectOfChunk(chunk));
                }, TIMEOUT);
            });
        });
    })(idChunks.shift());
}
console.time(1);
listAllMessageIds().then(getSubjectOfAllMessages).then(function(result) {
    console.log(result.length + ' messages were fetched in ');
    console.timeEnd(1);
}).catch(console.error.bind(console));
// => 7534 messages were fetched in 63277ms
With partial responses and batch requests, I could fetch ~7500 messages without trouble in 63 seconds.
Besides batching, there's also the option of using multiple threads. Also, if you only need the subject headers, make sure you're using message.get(format=METADATA, metadataHeaders=["subject"]) so you're only requesting the data you need.
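For a single (non-batched) request, that metadata-only call looks roughly like this with the same request-promise setup as above; the query parameters mirror the ones already used inside the batch code, and ACCESS_TOKEN is the same placeholder token:
// Sketch: fetch only the id and subject header of one message
// (metadata format + partial response), reusing rp and ACCESS_TOKEN from above.
function getSubject(messageId) {
    return rp({
        uri: 'https://www.googleapis.com/gmail/v1/users/me/messages/' + messageId,
        qs: {
            access_token: ACCESS_TOKEN,
            format: 'metadata',
            metadataHeaders: 'subject',
            fields: 'id,payload/headers'
        },
        json: true
    });
}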

How to set total records dynamically from the controller

Here is the problem: the server responds with several records in JSON, and their number is greater than the grid's pageSize parameter specified in the Store. The total count is not returned by the server in this JSON along with the data. The number of such records is known and can differ (it has to be requested from the server in a separate request). The total count is needed for the paging toolbar.
How do I tell the proxy's reader this number from the view controller?
The only workable solution I found is to override the Ext.data.reader.Json reader with the following code:
Ext.define('MyApp.custom.AnotherReader', {
    extend: 'Ext.data.reader.Json',
    alias: 'reader.anotherReader',

    // parse the response and write it into the store
    getResponseData: function(response) {
        var st = Ext.decode(response.responseText);
        st.total = 5;
        //console.log(st);
        return st;
    }
});
The problem is that I cannot change this total parameter dynamically from the view controller.
JSON 1:
[
    { "id": "1", "user_id": "11" },
    { "id": "2", "user_id": "12" },
    { "id": "3", "user_id": "13" },
    { "id": "4", "user_id": "14" },
    { "id": "5", "user_id": "15" }
]
JSON 2:
{
    "records_count": "5"
}
You can do this inside your controller:
// Some event handler or normal function inside your controller that you'll call.
somFunction: function() {
    var me = this;
    // You can also pass the store instance as a parameter to this function.
    var store = Ext.getStore(<storeId>);
    var reader = store.getProxy().getReader();
    Ext.override(reader, {
        getResponseData: function(response) {
            var st = Ext.decode(response.responseText);
            st.total = me.getValueYouWant();
            return st;
        }
    });
}
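One way to make getValueYouWant() return the real count is to fetch JSON 2 first and stash the value on the controller; here is a rough sketch, where the URL is hypothetical and the property name comes from the sample above:
// Hypothetical sketch: load the count (JSON 2) before loading the data (JSON 1).
loadRecordsCount: function() {
    var me = this;
    Ext.Ajax.request({
        url: '/records_count',   // hypothetical endpoint returning JSON 2
        success: function(response) {
            me.recordsCount = parseInt(Ext.decode(response.responseText).records_count, 10);
            me.somFunction();                 // override the reader as shown above
            Ext.getStore(<storeId>).load();   // now load the data
        }
    });
},

getValueYouWant: function() {
    return this.recordsCount;
}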

Firebase.util - similar intersections with completely different results

I'm working on an Angular project that uses Firebase as its sole backend, AngularFire for some synchronisation cases, and the Firebase.util tool for dealing with shared resources. My case is this:
{
    users : {
        user1 : {
            tasks : {
                active : {
                    task1 : true,
                    task2 : true
                },
                archived : {
                    task3 : true,
                    task4 : true
                }
            },
            ...
        }
    },
    tasks : {
        task1 : {
            users : {
                user1 : true,
                user2 : true
            }
        },
        ...
    }
}
and I'm dealing with the query like this:
var tasksRef = new $window.Firebase(FIREBASE_URL + '/tasks');

function _userActiveTasksRef(userId) {
    return new $window.Firebase(FIREBASE_URL + '/users/' + userId + '/tasks/active');
}

function _userArchivedTasksRef(userId) {
    return new $window.Firebase(FIREBASE_URL + '/users/' + userId + '/tasks/archived');
}

function getActive(userId) {
    var interRef = $window.Firebase.util.intersection(_userActiveTasksRef(userId), tasksRef);
    return $firebase(interRef).$asArray();
}

function getArchived(userId) {
    var interRef = $window.Firebase.util.intersection(_userArchivedTasksRef(userId), tasksRef);
    return $firebase(interRef).$asArray();
}
In the first case, when I intersect the active tasks with the "all tasks" ref, everything works fine, but when I try to perform the same operation with the archived tasks, the intersection is always empty. I've already logged the individual queries and everything works as expected; only the intersection doesn't seem to work. Is there any caveat that I'm missing? The two queries are being loaded at the same time, if that matters. The result is stored in a controller like this:
this.tasks = {
    active: tasks.getActive(currentUser.uid),
    archived: tasks.getArchived(currentUser.uid)
};

How to check the return status of the store's sync in Ext JS

I use
store.sync({
    success: function() {},
    failure: function() {}
});
to sync with the server, and the server returns {success:false} or {success:true}.
How do I check the JSON returned by the server inside store.sync?
I already know that success is "the function to be called upon successful completion of the sync", and that it is called even when the server returns {success:false}, not only {success:true}.
You need to change the reader's successProperty to false in the store's proxy.
store.proxy.reader.successProperty = false;
or
var store = Ext.create('Ext.data.Store', {
    (...)
    proxy : {
        type : 'ajax',
        (...)
        reader : {
            successProperty : false,
            (...)
        }
    }
});
and then you can use this:
store.sync({
    callback : function (batch, options) {
        var operations = batch.operations;
        for (var x in operations) {
            var operation = operations[x];
            if (operation.request) {
                console.log('operation.request ---> ', operation.request);
            }
            if (operation.response) {
                console.log('operation.response ---> ', operation.response);
                var object = Ext.decode(operation.response.responseText, false);
                console.log('success --->', object.success);
            }
        }
    }
});

Persisting filters in a grid panel

I would like to persist the filters applied to a grid panel across page refreshes. Can you please guide me in doing this?
Thanks.
Here is the code which sends the filter data to the web service:
Ext.extend(Ext.ux.AspWebServiceProxy, Ext.data.DataProxy, {
    load: function(params, reader, callback, scope, arg) {
        var userContext = {
            callback: callback,
            reader: reader,
            arg: arg,
            scope: scope
        };
        var proxyWrapper = this;
        //debugger;

        // Handles the response we get back from the web service call
        var webServiceCallback = function(response, context, methodName) {
            proxyWrapper.loadResponse(response, userContext, methodName);
        }

        var serviceParams = [];
        var filters = {};

        // Convert the params into an array of values so that they can be used in the call
        // (note: assumes that the properties on the object are in the correct order)
        for (var property in params) {
            if (property.indexOf("filter[") == 0) {
                filters[property] = params[property];
            }
            else {
                serviceParams.push(params[property]);
            }
            //console.log("Property: ", property, "Value: ", params[property]);
        }
        serviceParams.push(filters);

        // Add the webservice callback handlers
        serviceParams.push(webServiceCallback);
        serviceParams.push(this.handleErrorResponse);

        // Make the actual ASP.Net web service call
        this.webServiceProxyMethod.apply(this.webServiceProxy, serviceParams);
    },

    handleErrorResponse: function(response, userContext, methodName) {
        window.location.reload();
        // Ext.MessageBox.show({
        //     title: 'Error',
        //     msg: response.get_message(),
        //     buttons: Ext.MessageBox.OK,
        //     icon: Ext.MessageBox.ERROR
        // });
        //alert("Error while calling method: " + methodName + "\n" + response.get_message());
    },

    loadResponse: function(response, userContext, methodName) {
        var result = userContext.reader.readRecords(response);
        userContext.callback.call(userContext.scope, result, userContext.arg, true);
    }
});
Turn on the Ext JS state manager globally (where you set Ext.BLANK_IMAGE_URL).
Ext.state.Manager.setProvider(new Ext.state.CookieProvider());
User changes to some components will now be stored in a cookie, which will persist across requests. If you need to store additional custom data, you can do that using Ext.state.Manager.set and Ext.state.Manager.get. State is configurable on individual components.
Saki has a good example.
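A minimal sketch of the custom-data route mentioned above; the 'gridFilters' key and the filter object are just placeholders:
// Register a provider once (as above), then store and retrieve arbitrary state.
Ext.state.Manager.setProvider(new Ext.state.CookieProvider());

// Save the current filter values under a key of your choosing...
Ext.state.Manager.set('gridFilters', { status: 'open', owner: 'me' });

// ...and read them back after a page refresh (the second argument is the default).
var savedFilters = Ext.state.Manager.get('gridFilters', {});

// Built-in component state is enabled per component with stateful/stateId:
// { xtype: 'grid', stateful: true, stateId: 'myGrid', ... }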
To persist filters on a grid you can use cookies; here is an example:
proxy: new Ext.data.HttpProxy({
    url: (local ? url.local : url.remote),
    method: 'GET',
    listeners: {
        beforeload: function(dataproxy, param) {
            if (param.searchConditions != undefined && param.searchConditions != '[]') {
                Ext.util.Cookies.set('SearchConditions', param.searchConditions);
            }
        }
    }
})
In the sample above we store the "searchConditions" JSON array in a cookie. Now let's see how to get that "searchConditions" value back whenever you load your grid.
store.load({
    params: {
        start: 0,
        limit: 50,
        searchConditions: JSON.parse(Ext.util.Cookies.get('SearchConditions'))
    }
});
Here you simply pass the "searchConditions" parameter using the value stored in the cookie. I hope the example above is useful; please comment if you need any help.
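One extra detail worth handling, not covered above: guard against the cookie not existing yet (for example on the first visit), and clear it when the user removes all filters. A rough sketch:
// Only parse the cookie if it has been set; Ext.util.Cookies.get returns null otherwise.
var saved = Ext.util.Cookies.get('SearchConditions');

store.load({
    params: {
        start: 0,
        limit: 50,
        searchConditions: saved ? JSON.parse(saved) : []
    }
});

// When the user clears all filters:
Ext.util.Cookies.clear('SearchConditions');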
