React Chrome extension and Promises

I am writing a Chrome extension in ReactJS.
I am looping through an array of URLs and trying to get the HTML content of those pages.
this.state.advertData.map(function(e, i) {
  common.updateTabUrl(e.url).then((tab) => {
    common.requestHTML(tab).then((response) => {
      console.log(response.content);
    })
  });
})
common.js:
let requestHTML = function(tab) {
  return new Promise(function(resolve, reject) {
    chrome.tabs.query({active: true, currentWindow: true}, function(tabs) {
      chrome.tabs.sendMessage(tab.id, {'req': 'source-code'}, function (response) {
        resolve(response)
      })
    })
  })
}
let updateTabUrl = function(url) {
  return new Promise(function(resolve, reject) {
    let update = chrome.tabs.update({
      url: url
    }, function(tab) {
      chrome.tabs.onUpdated.addListener(function listener(tabId, info) {
        if (info.status === 'complete' && tabId === tab.id) {
          chrome.tabs.onUpdated.removeListener(listener);
          resolve(tab);
        }
      });
    })
  })
}
content_script.js:
chrome.runtime.onMessage.addListener(function (request, sender, sendResponse) {
  let response = '';
  if (request.req === 'source-code') {
    response = document.documentElement.innerHTML;
  }
  sendResponse({content: response});
});
My issue is that response.content always seems to be the same. More importantly, the tab that updates only ever seems to display the last URL in my array. I think it is a problem with the way I am handling Promises.
Any help is appreciated.

The problem with your code is that it doesn't wait for the previous URL to load before proceeding to the next one, so only the last one actually gets loaded in a tab.
I suggest using 1) Mozilla's WebExtension polyfill, 2) async/await syntax, 3) executeScript, which by default runs automatically once a tab is complete, and 4) a literal code string in executeScript, so you need neither a separate content script file nor a declaration in manifest.json.
async function getUrlSourceForArray({urls, tabId = null}) {
  const results = [];
  for (const url of urls) {
    await browser.tabs.update(tabId, {url});
    const [html] = await browser.tabs.executeScript(tabId, {
      code: 'document.documentElement.innerHTML',
    });
    results.push(html);
  }
  return results;
}
Invoking inside an async function:
const allHtmls = await getUrlSourceForArray({
  urls: this.state.advertData.map(d => d.url),
  tabId: null, // active tab
});
P.S. You can also open all the URLs at once in a new window in the background, assuming there won't be more than, say, 10 URLs; otherwise you would risk exhausting the user's RAM.
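A hedged sketch of that batch variant (browser.windows.create does accept an array of URL strings; treat this as an illustration, not part of the original answer):
// Open every URL at once in an unfocused window; each tab would still
// need to be scraped with executeScript once it finishes loading.
const win = await browser.windows.create({
  url: this.state.advertData.map(d => d.url),
  focused: false,
});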

Related

Nativescript Class constructor Observable cannot be invoked without 'new'

I'm trying to upload a multipart form in NativeScript using the nativescript-background-http plugin. I keep getting the error Class constructor Observable cannot be invoked without 'new'. I've tried changing the compilerOptions target to es5 and es2017, but nothing changed.
Here's all my code from the component.
onSave() {
  console.log("clicked")
  this.proccessImageUpload(this.file);
}
public onSelectSingleTap() {
  this.isSingleMode = true;
  let context = imagepicker.create({
    mode: "single"
  });
  this.startSelection(context);
}
private startSelection(context) {
  let that = this;
  context
    .authorize()
    .then(() => {
      that.imageAssets = [];
      that.imageSrc = null;
      return context.present();
    })
    .then((selection) => {
      console.log("Selection done: " + JSON.stringify(selection));
      this.file = selection[0]._android;
      that.imageSrc = that.isSingleMode && selection.length > 0 ? selection[0] : null;
      // set the images to be loaded from the assets with optimal sizes (optimize memory usage)
      selection.forEach(function (element) {
        element.options.width = that.isSingleMode ? that.previewSize : that.thumbSize;
        element.options.height = that.isSingleMode ? that.previewSize : that.thumbSize;
      });
      that.imageAssets = selection;
    }).catch(function (e) {
      console.log(e);
    });
}
// process image upload function
proccessImageUpload(fileUri) {
  var backgroundHttp = require("nativescript-background-http");
  return new Promise((resolve, reject) => {
    var request = {
      url: 'http://192.168.0.2:4000/api/posts',
      method: "POST",
      headers: {
        "Content-Type": "application/octet-stream",
        "user_id": "<user_id>"
      },
      description: 'Uploading profile image..',
      androidAutoDeleteAfterUpload: false,
      androidNotificationTitle: 'Profile image'
    }
    var params = [
      { name: "title", value: "test" },
      { name: "content", value: "test" },
      { name: "fileToUpload", filename: fileUri, mimeType: "image/jpeg" }
    ];
    var backgroundSession = backgroundHttp.session('image-upload');
    var task = backgroundSession.uploadFile(fileUri, request);
    task.on("progress", (e) => {
      console.log(`uploading... ${e.currentBytes} / ${e.totalBytes}`);
    });
    task.on("error", (e) => {
      console.log(`Error processing upload ${e.responseCode} code.`);
      reject(`Error uploading image!`);
    });
    task.on("responded", (e) => {
      console.log(`received ${e.responseCode} code. Server sent: ${e.data}`);
      // var uploaded_response = JSON.parse(e.data);
    });
    task.on("complete", (e) => {
      console.log(`upload complete!`);
      console.log(`received ${e.responseCode} code`);
      // console.log(e.data);
    })
    resolve(task);
  });
}
I know the issue is coming from this line.
var task = backgroundSession.uploadFile(fileUri, request);
Any help would be greatly appreciated!
You are using an old version of the nativescript-background-http plugin.
You have to install the latest version:
tns plugin add @nativescript/background-http
I was able to get this working by installing tns version 6.
I had exactly the same problem. I got this from slack.com, compliments of Chris Vietor:
"tns plugin add nativescript-background-http" works with NativeScript 6.
"tns plugin add @nativescript/background-http" works with NativeScript 7.

Retrieve collection + subcollection gives me an empty render

The problem
At web app startup, the main task is to retrieve all the user information stored in various Firestore collections and sub-collections.
The problem is that, even though I use promises and a loading state to prevent an empty app render, the app renders with all collection data except for collections that have sub-collections.
The process
- If the user is logged in
-- Set Loading Status Active
-- Load Collection A
-- Load Collection B and forEach, load all sub-collection
-- Load Collection C
-- Set Loading Status Inactive
At this point, the app is rendered, but only with Collection A and Collection C. Collection B is loaded (I can see it in the Redux logs) but can't be seen in the app.
That data appears only if I change the component state (opening/closing a menu, for example).
Some code
Here's how I retrieve a collection with sub-collections:
export function setCompanyJobs(user) {
  return {
    type: "SET_COMPANY_JOBS",
    payload: loadCompanyJobs(user),
  };
}
The function that retrieves the main collection:
export function loadCompanyJobs(user) {
  return new Promise((resolve, reject) => {
    let companyJobs = [];
    db.collection("company").doc(user.selectedCompany).collection("jobs").get().then((jobs) => {
      jobs.forEach((job) => {
        loadJobLinkedServices(user, job).then((jobLinkedServices) => {
          companyJobs.push({
            id: job.id,
            ...
          });
        });
      });
      resolve(companyJobs);
    }).catch(function (error) {
      ...
    });
  });
}
The function that retrieves all of a collection's sub-collections:
export function loadJobLinkedServices(user, job) {
  return new Promise((resolve, reject) => {
    let jobLinkedServices = [];
    db.collection("company").doc(user.selectedCompany).collection("jobs").doc(job.id).collection("linkedServices").get().then((linkedServices) => {
      linkedServices.forEach((linkedService) => {
        jobLinkedServices.push({
          id: linkedService.id,
          ...
        });
      });
      resolve(jobLinkedServices)
    }).catch(function (error) {
      ...
    });
  })
}
When you do
return new Promise((resolve, reject) => {
  let companyJobs = [];
  db.collection("company").doc(user.selectedCompany).collection("jobs").get().then((jobs) => {
    jobs.forEach((job) => {
      loadJobLinkedServices(user, job).then((jobLinkedServices) => {
        companyJobs.push({
          id: job.id,
          ...
        });
      });
    });
    resolve(companyJobs);
  }).catch(function (error) {
    ...
  });
});
nothing ensures that your Promise resolves only after ALL the queries triggered in the jobs.forEach() loop are done (i.e. after the promises returned by the calls to the loadJobLinkedServices function have resolved).
I don't know ReactJS, but I think you can use the JavaScript Promise.all() method along the following lines:
return new Promise((resolve, reject) => {
  let promises = [];
  let companyJobs = [];
  db.collection("company").doc(user.selectedCompany).collection("jobs").get().then((jobs) => {
    jobs.forEach((job) => {
      promises.push(loadJobLinkedServices(user, job));
    });
    Promise.all(promises)
      .then(results => {
        // Loop over the results array to populate the companyJobs array
        resolve(companyJobs);
      })
  }).catch(function (error) {
    ...
  });
});
Also, don't forget to correctly chain your calls to the different asynchronous functions, i.e. something like the following (see the sketch after this list):
query Collection A
THEN query Collection B
THEN query all sub-collections (with Promise.all())
THEN query Collection C
THEN set Loading Status Inactive
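As a hedged sketch of that chain (setLoadingStatus and loadCollectionA/B/C are hypothetical stand-ins for your own state setter and loaders):
setLoadingStatus(true);
loadCollectionA()
  .then(() => loadCollectionB())
  .then((jobs) => Promise.all(jobs.map((job) => loadJobLinkedServices(user, job))))
  .then(() => loadCollectionC())
  .then(() => setLoadingStatus(false)) // everything has loaded only at this point
  .catch((error) => console.error(error));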
One last remark: note that the get() method returns a promise, so I am not sure you need to wrap the calls to get() in new Promises (again, I am not versed in ReactJS, so this remark may be wrong).
In other words, I think you could do something like the following (for example, for the loadJobLinkedServices function):
export function loadJobLinkedServices(user, job) {
  let jobLinkedServices = [];
  return db.collection("company").doc(user.selectedCompany).collection("jobs").doc(job.id).collection("linkedServices").get()
    .then((linkedServices) => {
      linkedServices.forEach((linkedService) => {
        jobLinkedServices.push({
          id: linkedService.id,
          ...
        });
      });
      return jobLinkedServices;
    }).catch(function (error) {
      ...
    });
}

Using an Array.prototype.forEach in AngularJS not updating the view

I have a piece of code inside an Angular controller that requests data from three services. Once the last service returns, I take the data from all of them and merge it into one dataset. Outside the nested loops, my "vm" instance has the values I want, the way I want them (verified with a $log.debug to the console), but when I try to access them in the view they still have the initialization values.
Here is my code:
function loadConfigs(userId) {
  // Load the apps the user have access to
  aimsApps.getAccessibleApps()
    .then((resApps) => {
      vm.apps = resApps.data;
      // Load notifications types
      aimsNotificationTypes.getNotificationTypes()
        .then((resTypes) => {
          vm.types = resTypes;
          // Load the configurations for a user
          aimsNotificationConfigs.getNotificationConfigs(userId)
            .then((resConfigs) => {
              vm.configs = resConfigs;
              // Create a store like notification[app][type] and load the configs
              $log.debug('ALL RESOLVED', vm.apps, vm.types, vm.configs);
              vm.apps.forEach((eApp) => {
                vm.notifSettings[eApp.name] = [];
                vm.types.forEach((eType) => {
                  vm.configs.forEach((eConfig) => {
                    if ((eConfig.app === eApp.name) && (eConfig.notificationType === eType.name)) {
                      vm.notifSettings[eApp.name][eType.name] = eConfig;
                    } else {
                      vm.notifSettings[eApp.name][eType.name] = {
                        app: eApp.name,
                        user: userId,
                        notificationType: eType.name,
                        sendToWeb: true,
                        sendToEmail: true,
                        sendToSMS: true,
                      };
                    }
                  });
                });
              });
              $log.debug('NOTIF CONFIG', vm.notifSettings);
            });
        });
    });
}
When the line $log.debug('NOTIF CONFIG', vm.notifSettings); is reached, I can see the values for vm.notifSettings in the console, but my view doesn't reflect those changes.
Any suggestions?
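For what it's worth, one hedged guess: if any of these services resolve with plain (non-$q) promises, the assignments land outside Angular's digest cycle, so the view only refreshes on the next digest, which would match the open/close-a-menu symptom. A minimal sketch of scheduling a digest after the merge, assuming $scope is injected into this controller:
// After building vm.notifSettings, schedule a digest in case the
// promises resolved outside Angular's digest cycle.
$log.debug('NOTIF CONFIG', vm.notifSettings);
$scope.$applyAsync(); // safe even if a digest is already in progress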

Serving PDF content back to browser via Node Express using pdfMake

I am making use of the pdfmake library for generating PDF documents in my Node Express application, and I want these to be sent straight back to the client to trigger the browser to automatically download the file.
As a reference point I have been using the following examples for my express middleware:
https://gist.github.com/w33ble/38c5e0220d491148de1c
https://github.com/bpampuch/pdfmake/issues/489
I have opted for sending a buffered response back, so the key part of my middleware looks like this:
function createPDFDocument(docDefinition, callback) {
  var fontDescriptors = {
    Roboto: {
      normal: './src/server/fonts/Roboto-Regular.ttf',
      bold: './src/server/fonts/Roboto-Medium.ttf',
      italics: './src/server/fonts/Roboto-Italic.ttf',
      bolditalics: './src/server/fonts/Roboto-MediumItalic.ttf'
    }
  };
  var printer = new Printer(fontDescriptors);
  var pdfDoc = printer.createPdfKitDocument(docDefinition);
  // buffer the output
  var chunks = [];
  pdfDoc.on('data', function(chunk) {
    chunks.push(chunk);
  });
  pdfDoc.on('end', function() {
    var result = Buffer.concat(chunks);
    callback(result);
  });
  pdfDoc.on('error', callback);
  // close the stream
  pdfDoc.end();
}
In my angular application I am using the $resource service and have an endpoint defined like so:
this.resource = $resource('api/document-requests/',
  null,
  <any>{
    'save': {
      method: 'POST',
      responseType: 'arraybuffer'
    }
  });
When I try this out, I don't get any browser download kicking in. The response I receive when looking in Chrome, and the response headers, were shown in screenshots (omitted here).
So it seems I'm not a million miles off. I have searched around and found solutions that mention converting to a Blob, but I think that's only relevant if I were serving back a Base64-encoded string of the document.
Can anyone suggest what may be my issue here?
Thanks
Here's a router:
router.get('/get-pdf-doc', async (req, res, next) => {
  try {
    var binaryResult = await createPdf();
    res.contentType('application/pdf').send(binaryResult);
  } catch (err) {
    saveError(err);
    res.send('<h2>There was an error displaying the PDF document.</h2>' +
      'Error message: ' + err.message);
  }
});
And here's a function to return the pdf.
const PdfPrinter = require('pdfmake');
const Promise = require("bluebird");

createPdf = async () => {
  var fonts = {
    Helvetica: {
      normal: 'Helvetica',
      bold: 'Helvetica-Bold',
      italics: 'Helvetica-Oblique',
      bolditalics: 'Helvetica-BoldOblique'
    }
  };
  var printer = new PdfPrinter(fonts);
  var docDefinition = {
    content: [
      'First paragraph',
      'Another paragraph, this time a little bit longer to make sure,' +
      ' this line will be divided into at least two lines'
    ],
    defaultStyle: {
      font: 'Helvetica'
    }
  };
  var pdfDoc = printer.createPdfKitDocument(docDefinition);
  return new Promise((resolve, reject) => {
    try {
      var chunks = [];
      pdfDoc.on('data', chunk => chunks.push(chunk));
      pdfDoc.on('end', () => resolve(Buffer.concat(chunks)));
      pdfDoc.end();
    } catch (err) {
      reject(err);
    }
  });
};
Everything seems fine to me; the only thing missing is the logic to trigger the download.
Check out this CodePen as an example.
Here I'm using Base64-encoded data, but you can just use binary data as well; just don't forget to change the href, where I'm mentioning scope.dataURL = base64....
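For the binary (arraybuffer) case above, a minimal client-side sketch using the standard Blob and object-URL APIs (downloadPdf is a made-up name, not from the CodePen):
function downloadPdf(arrayBuffer, filename) {
  // Wrap the raw bytes in a Blob and click a temporary link to save it.
  var blob = new Blob([arrayBuffer], { type: 'application/pdf' });
  var url = URL.createObjectURL(blob);
  var a = document.createElement('a');
  a.href = url;
  a.download = filename || 'document.pdf';
  document.body.appendChild(a);
  a.click();
  document.body.removeChild(a);
  URL.revokeObjectURL(url);
}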
I had issues serving PDF files from Node.js as well, so I made use of PhantomJS. You can check out this repository for the full codebase and implementation.
console.log('Loading web page')
const page = require('webpage').create()
const args = require('system').args
const url = 'www.google.com'
page.viewportSize = { width: 1024, height: 768 }
page.clipRect = { top: 0, left: 0 }
page.open(url, function(status) {
  console.log('Page loaded')
  setTimeout(function() {
    page.render('docs/' + args[1] + '.pdf')
    console.log('Page rendered')
    phantom.exit()
  }, 10000)
})

Lighthouse/Service Worker, how to return http 200 when offline

My application currently uses webpack, AngularJS, and a service worker.
I'm using the sw-precache plugin to create my service worker:
https://www.npmjs.com/package/sw-precache-webpack-plugin
The service worker caching is going well and I can see my static resources being fetched from serviceworker.js in Chrome DevTools.
Now when I run the Lighthouse report, I still get the following audit failure:
URL responds with a 200 when offline
https://github.com/GoogleChrome/lighthouse
In DevTools, when I switch on offline mode, I can actually see my page load. There are some errors in the console for third-party scripts failing. Is that the reason for not getting a 200 response for the URL? A sample error:
GET https://fonts.googleapis.com/css?family=Roboto+Slab:300,400,700 net::ERR_INTERNET_DISCONNECTED
What exactly is this audit looking for, and how can I achieve it?
Edit: I added a picture of my network tab when I turn on offline (screenshot omitted); as I said, the page loads fine. I notice my sw.js gets loaded from disk cache, which I don't notice on other sites, so there could be something there.
Also, here is the sw.js content:
'use strict';
var precacheConfig = [["/css/app.styles.77e2a0c3e7ac001193566741984a07f0.css","77e2a0c3e7ac001193566741984a07f0"],["/css/vendor.styles.582e79ead0684a8fb648ce9e543ad810.css","582e79ead0684a8fb648ce9e543ad810"],["/favicon.ico","70ef569d9a12f6873e86ed57d575cf13"],["/fonts/MaterialIcons-Regular.eot","e79bfd88537def476913f3ed52f4f4b3"],["/fonts/MaterialIcons-Regular.svg","a1adea65594c502f9d9428f13ae210e1"],["/fonts/MaterialIcons-Regular.ttf","a37b0c01c0baf1888ca812cc0508f6e2"],["/fonts/MaterialIcons-Regular.woff","012cf6a10129e2275d79d6adac7f3b02"],["/fonts/MaterialIcons-Regular.woff2","570eb83859dc23dd0eec423a49e147fe"],["/icons/launcher-icon-2x.png","91896b953c39df7c40b4772100971220"],["/icons/launcher-icon-3x.png","0aee2add7f56559aeae9555e495c3881"],["/icons/launcher-icon-4x.png","b164109dd7640b14aaf076d55a0a637b"],["/images/aa_logo_only.png","b5b46a8c2ead9846df1f1d3035634310"],["/images/developer.png","e8df747b292fe6f5eb2403c7180c31da"],["/images/facebook.png","8ab42157d0974099a72e151c23073022"],["/images/home-bg.jpeg","0a0f7da8574b037463af2f1205801e56"],["/images/logo.png","e8712312e08ca427d79a9bf34aedd6fc"],["/images/map.png","af3443ef4ab2890cae371c7a3de437ed"],["/images/pattern.png","114d593511446b9a4c6e340f7fef5c84"],["/images/twitter.png","99da44949cd33e16d2d551d42559eaf2"],["/index.html","1e9b5c4b3abba7e13d8d28c98cfb3bb5"],["/js/app.d9ada27616bf469d794d.js","8e2fc74de7d5c122ab8f0aca7e31b075"],["/js/vendor.d9ada27616bf469d794d.js","3bbba4569b6f3b88881b0533260905fe"],["/manifest.json","4bea29155995b63a9f2855637c0fe74c"]];
var cacheName = 'sw-precache-v2-45-' + (self.registration ? self.registration.scope : '');
var ignoreUrlParametersMatching = [/^utm_/];
var addDirectoryIndex = function (originalUrl, index) {
  var url = new URL(originalUrl);
  if (url.pathname.slice(-1) === '/') {
    url.pathname += index;
  }
  return url.toString();
};
var createCacheKey = function (originalUrl, paramName, paramValue,
                               dontCacheBustUrlsMatching) {
  // Create a new URL object to avoid modifying originalUrl.
  var url = new URL(originalUrl);
  // If dontCacheBustUrlsMatching is not set, or if we don't have a match,
  // then add in the extra cache-busting URL parameter.
  if (!dontCacheBustUrlsMatching ||
      !(url.toString().match(dontCacheBustUrlsMatching))) {
    url.search += (url.search ? '&' : '') +
      encodeURIComponent(paramName) + '=' + encodeURIComponent(paramValue);
  }
  return url.toString();
};
var isPathWhitelisted = function (whitelist, absoluteUrlString) {
  // If the whitelist is empty, then consider all URLs to be whitelisted.
  if (whitelist.length === 0) {
    return true;
  }
  // Otherwise compare each path regex to the path of the URL passed in.
  var path = (new URL(absoluteUrlString)).pathname;
  return whitelist.some(function(whitelistedPathRegex) {
    return path.match(whitelistedPathRegex);
  });
};
var stripIgnoredUrlParameters = function (originalUrl,
                                          ignoreUrlParametersMatching) {
  var url = new URL(originalUrl);
  url.search = url.search.slice(1) // Exclude initial '?'
    .split('&') // Split into an array of 'key=value' strings
    .map(function(kv) {
      return kv.split('='); // Split each 'key=value' string into a [key, value] array
    })
    .filter(function(kv) {
      return ignoreUrlParametersMatching.every(function(ignoredRegex) {
        return !ignoredRegex.test(kv[0]); // Return true iff the key doesn't match any of the regexes.
      });
    })
    .map(function(kv) {
      return kv.join('='); // Join each [key, value] array into a 'key=value' string
    })
    .join('&'); // Join the array of 'key=value' strings into a string with '&' in between each
  return url.toString();
};
var hashParamName = '_sw-precache';
var urlsToCacheKeys = new Map(
  precacheConfig.map(function(item) {
    var relativeUrl = item[0];
    var hash = item[1];
    var absoluteUrl = new URL(relativeUrl, self.location);
    var cacheKey = createCacheKey(absoluteUrl, hashParamName, hash, false);
    return [absoluteUrl.toString(), cacheKey];
  })
);
function setOfCachedUrls(cache) {
  return cache.keys().then(function(requests) {
    return requests.map(function(request) {
      return request.url;
    });
  }).then(function(urls) {
    return new Set(urls);
  });
}
self.addEventListener('install', function(event) {
  event.waitUntil(
    caches.open(cacheName).then(function(cache) {
      return setOfCachedUrls(cache).then(function(cachedUrls) {
        return Promise.all(
          Array.from(urlsToCacheKeys.values()).map(function(cacheKey) {
            // If we don't have a key matching url in the cache already, add it.
            if (!cachedUrls.has(cacheKey)) {
              return cache.add(new Request(cacheKey, {credentials: 'same-origin'}));
            }
          })
        );
      });
    }).then(function() {
      // Force the SW to transition from installing -> active state
      return self.skipWaiting();
    })
  );
});
self.addEventListener('activate', function(event) {
  var setOfExpectedUrls = new Set(urlsToCacheKeys.values());
  event.waitUntil(
    caches.open(cacheName).then(function(cache) {
      return cache.keys().then(function(existingRequests) {
        return Promise.all(
          existingRequests.map(function(existingRequest) {
            if (!setOfExpectedUrls.has(existingRequest.url)) {
              return cache.delete(existingRequest);
            }
          })
        );
      });
    }).then(function() {
      return self.clients.claim();
    })
  );
});
self.addEventListener('fetch', function(event) {
  if (event.request.method === 'GET') {
    // Should we call event.respondWith() inside this fetch event handler?
    // This needs to be determined synchronously, which will give other fetch
    // handlers a chance to handle the request if need be.
    var shouldRespond;
    // First, remove all the ignored parameters and see if we have that URL
    // in our cache. If so, great! shouldRespond will be true.
    var url = stripIgnoredUrlParameters(event.request.url, ignoreUrlParametersMatching);
    shouldRespond = urlsToCacheKeys.has(url);
    // If shouldRespond is false, check again, this time with 'index.html'
    // (or whatever the directoryIndex option is set to) at the end.
    var directoryIndex = 'index.html';
    if (!shouldRespond && directoryIndex) {
      url = addDirectoryIndex(url, directoryIndex);
      shouldRespond = urlsToCacheKeys.has(url);
    }
    // If shouldRespond is still false, check to see if this is a navigation
    // request, and if so, whether the URL matches navigateFallbackWhitelist.
    var navigateFallback = '';
    if (!shouldRespond &&
        navigateFallback &&
        (event.request.mode === 'navigate') &&
        isPathWhitelisted([], event.request.url)) {
      url = new URL(navigateFallback, self.location).toString();
      shouldRespond = urlsToCacheKeys.has(url);
    }
    // If shouldRespond was set to true at any point, then call
    // event.respondWith(), using the appropriate cache key.
    if (shouldRespond) {
      event.respondWith(
        caches.open(cacheName).then(function(cache) {
          return cache.match(urlsToCacheKeys.get(url)).then(function(response) {
            if (response) {
              return response;
            }
            throw Error('The cached response that was expected is missing.');
          });
        }).catch(function(e) {
          // Fall back to just fetch()ing the request if some unexpected error
          // prevented the cached response from being valid.
          console.warn('Couldn\'t serve response for "%s" from cache: %O', event.request.url, e);
          return fetch(event.request);
        })
      );
    }
  }
});
Some resources, like
https://fonts.googleapis.com/css?family=Roboto+Slab:300,400,700
do not work in offline mode. Download these files manually and add them again from a local path.
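Alternatively, sw-precache can cache cross-origin resources at runtime instead of precaching them. A hedged sketch of the webpack plugin config (runtimeCaching is a documented sw-precache option backed by sw-toolbox; the exact URL pattern here is an assumption for Google Fonts):
// webpack.config.js (goes in the plugins array)
const SWPrecacheWebpackPlugin = require('sw-precache-webpack-plugin');

new SWPrecacheWebpackPlugin({
  filename: 'sw.js',
  runtimeCaching: [{
    // Serve previously fetched Google Fonts responses from cache when offline.
    urlPattern: /^https:\/\/fonts\.(googleapis|gstatic)\.com\//,
    handler: 'cacheFirst',
  }],
})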
