I'm working on an Angular project that uses Firebase as its sole backend, AngularFire for some synchronisation cases, and the tool Firebase.util for dealing with shared resources. My case is this:
{
users {
user1 : {
tasks : {
active : {
task1 : true,
task2 : true
},
archived : {
task3 : true,
task4 : true
}
},
...
},
tasks : {
task1 : {
users : {
user1 : true,
user2 : true
}
},
...
}
},
}
and I'm dealing with the query like this:
// Root ref for the flat /tasks collection shared by all users.
var tasksRef = new $window.Firebase(FIREBASE_URL + '/tasks');

// Builds a ref to the user's task-id index at /users/<userId>/tasks/<status>.
// `status` is 'active' or 'archived'. (Shared helper: the two public ref
// builders below were duplicating this path construction.)
function _userTasksRef(userId, status) {
  return new $window.Firebase(FIREBASE_URL + '/users/' + userId + '/tasks/' + status);
}
function _userActiveTasksRef(userId) {
  return _userTasksRef(userId, 'active');
}
function _userArchivedTasksRef(userId) {
  return _userTasksRef(userId, 'archived');
}

// Intersects the user's task-id index with /tasks and exposes the result as a
// live AngularFire array.
function _getTasks(userId, status) {
  var interRef = $window.Firebase.util.intersection(_userTasksRef(userId, status), tasksRef);
  return $firebase(interRef).$asArray();
}
// Active tasks for a user, joined against the /tasks collection.
function getActive(userId) {
  return _getTasks(userId, 'active');
}
// Archived tasks for a user, joined against the /tasks collection.
function getArchived(userId) {
  return _getTasks(userId, 'archived');
}
On the first case, when I intersect the active tasks with the "all tasks" ref everything works fine but when I try to perform the same operation with the archived tasks the intersection is always empty. I've already logged the individual queries and everything is working as expected, only the intersection doesn't seem to work. Is there any caveat that I'm missing? The two queries are being loaded at the same time if that matters.. the result is being stored in a controller like this:
this.tasks = {
active: tasks.getActive(currentUser.uid),
archived: tasks.getArchived(currentUser.uid)
};
Related
I have entered the code correctly, and it worked too. But now I don't know why I am getting the same error on different lines of code each time I test. The error occurs at the chained commands.
Sometimes it's a chained cy.find command, while other times it's some other chained command.
::EDIT::
I have changed the code a bit and tried different methods, but I'm still getting the same error. The code below (Code-1) contains the Cypress commands that execute the integration test. Since the values for this particular test might change, I made another file to pass all those values in as a single object (Code-2).
FYI, the code worked the first time, when I didn't have Code-2 — i.e., when I manually inserted all those values.
Code-1:
/* eslint-disable no-undef */
import { cropFileName, testData } from '../test_case/quoteGen'
describe('waste-Quote-Generation-for-Permanant-Client', () => {
  beforeEach(() => {
    cy.visit('/login');
  });

  it('user login', () => {
    // login
    cy.findByPlaceholderText(/username/i).type(testData.username);
    cy.findByPlaceholderText(/password/i).type(testData.pass);
    cy.get('.newLoginPanelBtn').click();

    // navigate to 'Sales' tab.
    cy.findByRole('link', {
      name: /sales/i,
      timeout: 5000
    })
      .should('be.visible')
      .click({ force: true });

    // Click on 'Generate Quote'
    cy.findByRole('link', {
      name: /generate quote/i,
      timeout: 5000
    }).click({ force: true });

    // Select Template
    cy.findByRole('link', {
      name: /template/i,
      timeout: 5000
    })
      .should('be.visible')
      .click({ force: true });

    // Select Client: pick an existing client, or create a new site first.
    if (testData.clientType !== 'create') {
      cy.findByText(testData.client).click();
    } else {
      cy.get('#newSiteName').click();
      cy.get('#newSiteName').type(testData.client);
      cy.get('#newSiteEmail').click();
      cy.get('#newSiteEmail').type(testData.clientEmail);
      cy.get('#createNewSite').click();
      cy.wait(200);
      cy.findByText(testData.client).click();
    }

    // Choose Type of Waste: add a new type, or continue with an existing one.
    if (testData.wasteTypeOpt.newName !== null) {
      cy.findByText(testData.wasteTypeOpt.name).click();
      cy.get('#towAddNewBtn').click();
    } else {
      cy.findByText(testData.wasteTypeOpt.name).click();
      cy.get('#towContinueBtn').click();
    }

    // Check if Job Card Exist
    if (testData.jobCard.new) {
      cy.findByText(/create job card/i).click();
      // Add Job Card and Verify Job Card
      cy.get('.jobCardDiv1 .jobCardDivInpContentDiv:nth-child(6) .jobCardDivInp').click();
      cy.get('.jobCardDiv1 .jobCardDivInpContentDiv:nth-child(6) .jobCardDivInp').type('1234');
      cy.get('#jobCardDate > span:nth-child(2)').click();
      cy.get('.MuiButton-textPrimary:nth-child(2) > .MuiButton-label').click();
      cy.get('.newJobCardBtnActive').click({ force: true });
    }
    cy.get('#job-cards-list > div:first').click();
    cy.get('.newJobCardBtnActive').click({ force: true });

    // Select Attachments
    // NOTE(review): `collection.eq(index)` is a jQuery object, so `.click()`
    // here is jQuery's click, not the Cypress command — confirm intended.
    cy.get('#attach-file-list').children().each((_, index, collection) => {
      if (index !== 3 && testData.attachments.includes(index)) {
        collection.eq(index).click();
      }
    });
    cy.get('#attach-file-list').children().findByText(cropFileName('safety attachments', 25, 5, 5)).click();

    // BUG FIX: the original wrapped this click in window.setTimeout. Cypress
    // commands queued from a setTimeout callback run outside the command
    // chain and may never execute before the test finishes. Use cy.wait (or,
    // better, the command's own retry timeout) instead.
    cy.wait(3000);
    cy.findByText(/add to attachment/i).click();

    cy.get('#quoteEditorButton').click();

    // Click on 'Quote'
    cy.get('#updateScopeOfWorkButton').click();
    cy.get('#editTableEditorModal').find('tr').eq(0).then(tbrow => {
      // One table row per scope-of-work entry; forEach because this is pure
      // side effects (the original used map and returned a dummy 0).
      testData.scopeofWork.forEach((el, k) => {
        cy.wrap(tbrow).findAllByPlaceholderText('Description').dblclick();
        cy.wrap(tbrow).findAllByPlaceholderText('Description').type(el.descp);
        cy.wrap(tbrow).find('.navbar-button').click();
        cy.findAllByText(el.tow).click();
        cy.wrap(tbrow).findByPlaceholderText('volume').dblclick();
        cy.wrap(tbrow).findByPlaceholderText('volume').type(el.vol);
        cy.wrap(tbrow).findByPlaceholderText('Unit').dblclick();
        cy.wrap(tbrow).findByPlaceholderText('Unit').type(el.unit);
        cy.wrap(tbrow).findByPlaceholderText('pricing').dblclick();
        cy.wrap(tbrow).findByPlaceholderText('pricing').type(el.pricing);
        // Add a fresh row for every entry except the last one.
        if (k < testData.scopeofWork.length - 1) {
          cy.get('#addNewRowScopeOfWork').click();
        }
      });
    });
    cy.get('#updateTableModalButton').click();
    cy.get('#continueGNRTQT').click();

    // Insert mail id, Click on Generate Quote.
    cy.get('.quote-cnrf-to-layout:nth-child(2) .quote-cnrf-emails-input').click();
    cy.get('.quote-cnrf-to-layout:nth-child(2) .quote-cnrf-emails-input').type(testData.email);
    cy.get('.quote-cnrf-to-layout:nth-child(3) .quote-cnrf-emails-input').click();
    cy.get('.quote-cnrf-emails-input:nth-child(2)').click();
    cy.get('.quote-cnrf-emails-input:nth-child(2)').type('asdf');
    cy.get('.quote-cnrf-emails-textarea').click();
    cy.get('.quote-cnrf-emails-textarea').type('asdf');
    if (testData.sendType) {
      cy.get('#generateandsend').click();
    } else {
      cy.get('#approvedQuote').click();
    }
    cy.get('.swal2-confirm').click();
  });
});
Code 2:
// Test fixture for the quote-generation spec. All labels/values the spec
// types or matches are driven from here so one file can retarget the test.
export const testData = {
  username : 'Tester',
  pass : '##tester001',
  tabType : "",
  template : /template/i,
  clientType : 'permanent', // permanent, temporary, create
  client : /geolocated site/i,   // matched via findByText
  clientEmail : null,            // only used when clientType === 'create'
  wasteTypeOpt: {
    name : /dilution pit/i,
    newName : null, },           // non-null => waste type is added as new
  jobCard : {
    new : false,                 // true => create a job card before selecting
  },
  attachments : [],              // indexes of attachment-list children to click
  scopeofWork : [                // one entry per table row in the quote editor
    { descp : 'asdf',
      tow : /grease/i,
      vol : '10',
      unit : '5',
      pricing : '2'
    }
  ],
  email : 'asdf#asdf.com',
  sendType : true // true => 'Generate and send', false => 'Approved Quote'
}
// Builds a case-insensitive RegExp that matches a (possibly cropped) file
// name. The frontend truncates long names to "<first frnt chars>...<last lst
// chars>", so when `str` is longer than `len` we build that cropped form here
// and escape it so '.' and other metacharacters match literally.
//
// @param {string} str  - full file name (null/undefined treated as empty)
// @param {number} len  - maximum length before the UI crops the name
// @param {number} frnt - characters kept from the front when cropped
// @param {number} lst  - characters kept from the end when cropped
// @returns {RegExp} case-insensitive matcher for the displayed name
export const cropFileName = (str, len, frnt, lst) => {
  // Local helper instead of patching the global RegExp object (the original
  // assigned RegExp.escape on every call, mutating a native).
  const escapeRegExp = (s) => s.replace(/[-\\^$*+?.()|[\]{}]/g, '\\$&');
  const source = str || '';
  const lastIndx = source.length;
  if (lastIndx <= len) {
    // Short names are displayed untouched. BUG FIX: escape them too — the
    // original passed the raw string, so metacharacters could mis-match.
    return new RegExp(escapeRegExp(source), 'i');
  }
  const cropped = source.substring(0, frnt) + '...' + source.substring(lastIndx - lst, lastIndx);
  // BUG FIX: the 'i' flag was missing on this branch even though the intent
  // (see the original trailing comment) was case-insensitive matching.
  return new RegExp(escapeRegExp(cropped), 'i');
};
The 'cropFileName' function manages all those long names. Long file names are truncated by the frontend (in CSS or JS), so 'cropFileName' checks the string's length and decides whether to insert '...' into the string when converting it to a RegExp.
I am trying to query the Demandware (SFCC) api to extract orders using a date range. The POST to orders_search works but it seems terribly inefficient. First I pull ALL data and then I filter the results.
I would like to simply query for the date ranges, but I cannot figure out how to do that. Help.
{
query : {
filtered_query: {
query: {
term_query: { fields: ["creation_date"], operator: "is_not_null" }
},
filter: {
range_filter: {
field: "creation_date",
from: "2017-08-12T00:00:00.000Z",
to: "2017-08-13T00:00:00.000Z",
from_inclusive: true
}
}
}
}
}
EDIT: While I solved the initial question, this ends up being more complicated because the service only allows 200 responses at a time. So first you have to make a request to find out how many results there are, then call the service multiple times to get data. Below is the code as used with C#. The date ranges are passed in as variables.
var m_payload_count = "{ query : { filtered_query: { query: { term_query: { fields: [\"creation_date\"], operator: \"is_not_null\" } }, filter: { range_filter: { field: \"creation_date\", from: \"" + strBeginDateTime + "\", to: \"" + strEndDateTime + "\", from_inclusive: true } } } } }";
// can only get 200 responses at a a time so make a basic call first to get the total
m_response_count = apiClient.UploadString(m_url, m_payload_count);
dynamic m_jsoncount = JsonConvert.DeserializeObject(m_response_count);
// determine number of times of full api call, rounding up. substitute begin/end dates and beginning count placeholder
int m_records = m_jsoncount["total"];
int m_numbercalls = (m_records + (200 - 1)) / 200;
dynamic m_json;
for (int i = 0; i < m_numbercalls; i++)
{
var m_payload = "{ query : { filtered_query: { query: { term_query: { fields: [\"creation_date\"], operator: \"is_not_null\" } }, filter: { range_filter: { field: \"creation_date\", from: \"" + strBeginDateTime + "\", to: \"" + strEndDateTime + "\", from_inclusive: true } } } }, select: \"(**)\", start: " + i * 200 + ", count: 200 }";
m_response = apiClient.UploadString(m_url, m_payload);
m_json = JsonConvert.DeserializeObject(m_response);
The remainder of the code is omitted, but it is essentially iterating through the m_json object.
{
"query" :
{
"filtered_query": {
"query": { "match_all_query": {} },
"filter": {
"range_filter": {
"field": "creation_date",
"from": "2016-01-01T00:00:00.000Z"
}
}
}
},
"select" : "(**)"
}
I need something like that, but I don't get the correct concatenation with the for loop and the push. In fact, I would like to avoid doing multiple queries, one per "palabras[i]". It would be excellent if I were able to go to the database only once:
function ($scope, $rootScope, $stateParams, $firebaseArray) {
var arrayRta = new Array();
var palabras = $rootScope.textAreaOfrecer.toLowerCase().split(' ');
for (i = 0; i < palabras.length; i++) {
var elementosConI = firebase.database().ref().child("solCompras").orderByChild("palabras/" + palabras[i]).equalTo(true);
arrayRta.push(elementosConI);
}
$scope.solicitudes = arrayRta;//$firebaseArray(arrayRta);
}
{
"solCompras" : {
"-KdTUecpbUuWJO_Fbj5Y" : {
"palabras" : {
"123" : true,
"444" : true,
"123123" : true
},
"post" : "123 123123 444",
"user" : "demo"
},
"-KdTcRy_P0rjEpnHwHCC" : {
"palabras" : {
"123" : true
},
"post" : "123",
"user" : "demo"
},
If you're looking to add the words to the list in the database, just call firebase.database().ref().child("solCompras").push(palabras[i]) for each word.
This is going to be as fast as adding them all in one call, because the requests are pipelined over the same connection. See Speed up fetching posts for my social network app by using query instead of observing a single event repeatedly
I am trying to implement syncUp function for offline devices in Salesforce on sendbox account with the following method:
// Resolves the Salesforce SmartSync Cordova plugin on demand, so the lookup
// happens only when the plugin is actually used.
var sfSmartsync = function () {
  var plugin = cordova.require("com.salesforce.plugin.smartsync");
  return plugin;
};
// Sync-up configuration for the 'Lead' soup.
// BUG FIX: target/options/soupName were implicit globals; declared with var.
// NOTE(review): `target` is intentionally empty here; the commented-out block
// below shows the typed/query form that was tried previously.
var target = {
};
/*target = {
type : "Lead"
query : "select * from {Lead}"
};*/
var options = {
  mergeMode : sfSmartsync().MERGE_MODE.OVERWRITE,
  attributes: {type:"Lead"}
};
var soupName = "Lead";
// sfSmartsync().syncUp(false, target, soupName, options, function successCallback(params) {
sfSmartsync().syncUp(soupName, options, function successCallback(params) {
  // Typos fixed: "sucessCallback" -> "successCallback", "Sucess" -> "Success".
  alert("Sync Up Success");
  console.log(JSON.stringify(params));
}, function errorCallback() {
  alert("Sync Up Error");
});
And I got a success which is :
{"status":"NEW","target":{"modificationDateFieldName":"LastModifiedDate","idFieldName":"Id","type":"rest"},"_soupEntryId":3,"maxTimeStamp":0,"totalSize":-1,"type":"syncUp","progress":0,"options":{"mergeMode":"OVERWRITE"},"soupName":"Lead"}
But when I go back to the Salesforce server I can't find my changes in the database. If anybody did something like that, then please help me.
I've had a requirement recently to implement a UI for managing a many-many relationship. Ward Bell kindly provided this plunker showing how to implement using 1-m-1 with Angular and Breeze.
My app's design is based largely (especially the datacontext and the local storage) is based largely on John Papa's recent Pluralsight courses.
In my app, BusUnit = Hero and Dimension = Power (in reference to Ward's example).
Everything seems to be working well when I force the app to fetch data from the server, in that my updates to a business unit's dimensions reflect correctly. The problem I'm facing now is when I navigate away from the page and back again (which gets data from local storage). In this case:
if I previously added a new dimension to a business unit, everything is ok, but
if i previously marked a business unit's dimension for deletion and the save, the dimension still appears for the business unit in question.
this is the controller code that initially gets business units and their dimensions:
// Loads the business units and dimensions for the current dashboard config in
// parallel, then builds the per-unit view models.
function getdboardStructure() {
  var unitsPromise = datacontextSvc.busUnits.getByDboardConfigId(vm.dboardConfig.id);
  var dimsPromise = datacontextSvc.dimensions.getByDboardConfigId(vm.dboardConfig.id);
  $q.all([unitsPromise, dimsPromise])
    .then(function (results) {
      vm.busUnits = results[0];
      vm.dims = results[1];
      createBusUnitVms();
      //vm.currentBusUnitVm = vm.busUnitVms[0]; // not required as using accordion instead of drop-down
      vm.hasChanges = false;
    });
}
this is the code in my controller that prepares for the save:
// Translates the checkbox state on a business-unit VM into entity operations:
// a newly ticked dimension creates a join entity, an unticked one marks the
// existing join entity for deletion.
function applyBusUnitDimensionSelections(busUnitVm) {
  var unit = busUnitVm.busUnit;
  // Hash of existing join entities keyed by dimension id.
  var existingByDimensionId = createBusUnitDimensionHash(unit);
  busUnitVm.dimensionMapVms.forEach(function (entry) {
    var existing = existingByDimensionId[entry.dimension.id];
    if (entry.selected && !existing) {
      // Ticked but no join entity yet: create it (result intentionally unused).
      datacontextSvc.busUnits.addBusUnitDimension(unit, entry.dimension)
        .then(function () {
        });
    } else if (!entry.selected && existing) {
      // Unticked but a join entity exists: flag it for deletion on next save.
      datacontextSvc.markDeleted(existing);
    }
  });
}
this is the code in my controller that executes the save:
// Applies all pending checkbox selections and persists them via the
// datacontext. Resolves immediately with null when there is nothing to save.
function save() {
  if (!canSave()) {
    // Nothing to do; hand back an already-resolved promise.
    return $q.when(null);
  }
  vm.isSaving = true;
  // Convert every unit VM's selections into entity adds/deletes first.
  vm.busUnitVms.forEach(applyBusUnitDimensionSelections);
  return datacontextSvc.save().then(
    function (saveResult) {
      vm.isSaving = false;
      trapSavedDboardConfigId(saveResult); // not relevant to use case
    },
    function (error) {
      // NOTE(review): the failure is swallowed here — callers receive a
      // resolved promise even when the save fails. Confirm this is intended.
      vm.isSaving = false;
    }
  );
}
this is the code in my repository that add a new busUnitDimension entity:
// Creates the join entity linking a business unit to a dimension, wires up
// both foreign keys, and returns it wrapped in a resolved promise.
function addBusUnitDimension(busUnit, dimension) {
  var link = this.em.createEntity(busUnitDimension);
  link.busUnitId = busUnit.id;
  link.dimensionId = dimension.id;
  return this.$q.when(link);
}
this is my datacontext code for marking an item deleted:
// Flags an entity for deletion; the delete is applied on the next save.
function markDeleted(entity) {
return entity.entityAspect.setDeleted();
}
and finally this is the repository code to get business units and their join table entities:
// Returns the business units for a dashboard config, serving from the local
// zStorage cache when it is primed, otherwise querying the server with the
// BusUnitDimensions join entities expanded.
// @param dboardConfigId - id to filter busUnits by
// @param forceRefresh - bypass the local cache when truthy
function getByDboardConfigId(dboardConfigId, forceRefresh) {
  var self = this;
  var predicate = pred.create('dboardConfigId', '==', dboardConfigId);
  var busUnits;
  // NOTE(review): the local branch queries by `entityName` (defined elsewhere
  // in this module) while the remote branch targets 'BusUnits' — confirm both
  // refer to the same entity set.
  if (self.zStorage.areItemsLoaded('busUnits') && !forceRefresh) {
    busUnits = self._getAllLocal(entityName, orderBy, predicate);
    return self.$q.when(busUnits);
  }
  // Remote query; expand pulls the join entities into cache alongside units.
  return eq.from('BusUnits')
    .expand('BusUnitDimensions')
    .where(predicate)
    .orderBy(orderBy)
    .using(self.em).execute()
    .to$q(succeeded, self._failed);
  function succeeded(data) {
    busUnits = data.results;
    // Presumably the second argument acts as a setter marking 'busUnits' as
    // loaded — TODO confirm against the zStorage implementation.
    self.zStorage.areItemsLoaded('busUnits', true);
    self.zStorage.save();
    //self.logSuccess('Retrieved ' + busUnits.length + ' business units from server', busUnits.length, true);
    return busUnits;
  }
}
My departure from John's course examples is that I'm using expand in the function I use to get Business Units from the server, and my hypothesis is that this has something to do with the fact that breeze is going to the server everytime I refresh the page (without clearing cache) instead, and that this also has something to do with the error i'm receiving if I navigate away and then back to the page.
Can anyone offer any suggestions?
Appreciate this was a long time ago and you have probably solved it or moved on but I came up against the same problem recently that took me ages to resolve.
The answer I found is that you have to edit JP's angular.breeze.storagewip.js file.
It contains the names of the entities hard-coded into the file, and you will need to change these to match your own entities.
There are two functions where you need to do this, examples below show the changes with the four entities I am using:
function zStorageCore($rootScope, zStorageConfig) {
var storeConfig = zStorageConfig.config;
var storeMeta = {
breezeVersion: breeze.version,
appVersion: storeConfig.version,
isLoaded: {
elementAssets : false,
surveyors : false,
elements : false,
assets : false
}
};
and...
// Parses data previously exported to local storage. The payload is a two-item
// array: [0] metadata (versions + isLoaded flags), [1] the actual data.
// Returns the data only when the breeze/app versions match; null on any
// mismatch or parse failure (an error event is broadcast in both cases).
function checkStoreImportVersionAndParseData(importedData) {
  if (!importedData) {
    return importedData;
  }
  try {
    var parsed = JSON.parse(importedData);
    var importMeta = parsed[0];
    var versionsMatch =
      importMeta.breezeVersion === storeMeta.breezeVersion &&
      importMeta.appVersion === storeMeta.appVersion;
    if (versionsMatch) {
      if (importMeta.isLoaded) {
        // Merge the "already loaded" flags: once a collection is loaded in
        // either place, keep treating it as loaded.
        ['assets', 'elements', 'surveyors', 'elementAssets'].forEach(function (key) {
          storeMeta.isLoaded[key] = storeMeta.isLoaded[key] || importMeta.isLoaded[key];
        });
      }
      return parsed[1];
    }
    _broadcast(storeConfig.events.error,
      'Did not load from storage because mismatched versions',
      { current: storeMeta, storage: importMeta });
  } catch (ex) {
    _broadcast(storeConfig.events.error, 'Exception during load from storage: ' + ex.message, ex);
  }
  return null; // failed
}
I solved this by comparing JP's Style Guide course files with his SPA/Angular/Breeze course.