Submit form (ReactJS) to Google Spreadsheet - change the message validation - reactjs

I want to record data in a Google Sheet from a ReactJS form (two fields where the user can enter a suggestion and comments).
This is my feedback form in React:
import React,{useState,useEffect} from 'react';
import './App.css';
const formUrl = 'https://script.google.com/macros/s/AK.../exec'
export default function FrmTable(){
  const [loading,setLoading] = useState(false)
  return(
    <div className="section-form">
      <form name="frm"
        method="post"
        action={formUrl}
      >
        <div className="form-elements">
          <div className="pure-group">
            <label className="pure-group-label">Suggestion content pdf</label>
            <input id="Suggestion content pdf" name="Suggestion content pdf" className="pure-group-text"
              type="text"
            />
          </div>
          <div className="pure-group">
            <label className="pure-group-label">Comments</label>
            <textarea id="Comments" name="Comments" rows="10" className="pure-group-text"
              placeholder=""
              maxLength="1000"
            ></textarea>
          </div>
        </div>
        <p className="loading-txt">{loading == true ? 'Loading.....' : ''}</p>
        <div className="pure-group pure-group-btn">
          <button className="button-success pure-button button-xlarge btn-style">Send</button>
        </div>
      </form>
    </div>
  )
}
The Google Apps Script that records the suggestion content and comments:
var SHEET_NAME = "Feedback";
// 2. Run > setup
// 3. Publish > Deploy as web app
// - enter Project Version name and click 'Save New Version'
// - set security level and enable service (most likely execute as 'me' and access 'anyone, even anonymously')
// 4. Copy the 'Current web app URL' and post this in your form/script action
//
// 5. Insert column names on your destination sheet matching the parameter names of the data you are passing in (exactly matching case)
var SCRIPT_PROP = PropertiesService.getScriptProperties(); // new property service
// If you don't want to expose either GET or POST methods you can comment out the appropriate function
function doGet(e){
return handleResponse(e);
}
function doPost(e){
return handleResponse(e);
}
function handleResponse(e) {
// shortly after my original solution Google announced the LockService[1]
// this prevents concurrent access from overwriting data
// we want a public lock, one that locks for all invocations
var lock = LockService.getPublicLock();
lock.waitLock(30000); // wait 30 seconds before conceding defeat.
try {
// next set where we write the data - you could write to multiple/alternate destinations
var doc = SpreadsheetApp.openById(SCRIPT_PROP.getProperty("key"));
var sheet = doc.getSheetByName(SHEET_NAME);
// we'll assume header is in row 1 but you can override with header_row in GET/POST data
var headRow = e.parameter.header_row || 1;
var headers = sheet.getRange(1, 1, 1, sheet.getLastColumn()).getValues()[0];
var nextRow = sheet.getLastRow()+1; // get next row
var row = [];
// loop through the header columns
for (i in headers){
if (headers[i] == "Timestamp"){ // special case if you include a 'Timestamp(Date)' column
row.push(new Date());
} else { // else use header name to get data
row.push(e.parameter[headers[i]]);
}
}
// more efficient to set values as [][] array than individually
sheet.getRange(nextRow, 1, 1, row.length).setValues([row]);
// return json success results
return ContentService
.createTextOutput(JSON.stringify({"result":"success", "row": nextRow}))
.setMimeType(ContentService.MimeType.JSON);
} catch(e){
// if error return this
return ContentService
.createTextOutput(JSON.stringify({"result":"error", "error": e}))
.setMimeType(ContentService.MimeType.JSON);
} finally { //release lock
lock.releaseLock();
}
}
function setup() {
var doc = SpreadsheetApp.getActiveSpreadsheet();
SCRIPT_PROP.setProperty("key", doc.getId());
}
Everything works fine: I can record the two fields (suggestion and comment) in the Google Sheet, but I would like to show a different view after submitting.
I've followed some tutorials because I'm new to React. At the end, after submitting, you are sent to script.googleusercontent.... because the Apps Script contains this code:
return ContentService
.createTextOutput(JSON.stringify({"result":"success", "row": nextRow}))
.setMimeType(ContentService.MimeType.JSON);
I just want to show a simple message, like a popup, saying the form was submitted successfully. Any idea is welcome :) thank you very much.
New edit: I've changed my code (React + Google Apps Script), but now I get a CORB (Cross-Origin Read Blocking) error.
import React,{useState,useEffect} from 'react';
import './App.css';
const formUrl = 'https://script.google.com/macros/s/AKfycbz4hMELOHff2Yd_ozpOid2cAWFSWPm_7AOD15OIeQRdYrocv0wa/exec'
export default function FrmTable(){
const jsonp = (url, callback) => {
var callbackName = 'jsonp_callback_' + Math.round(100000 * Math.random());
window[callbackName] = function(data) {
alert("Formulaire envoyé ");
delete window[callbackName];
document.body.removeChild(script);
callback(data);
};
var script = document.createElement('script');
script.src = url + (url.indexOf('?') >= 0 ? '&' : '?') + 'callback=' + callbackName;
document.body.appendChild(script);
}
const mySubmitHandler = (event) => {
event.preventDefault();
jsonp(formUrl + '?La+FAQ+en+question=' + encodeURIComponent(faqName), (data) => {
// alert(data);
});
event.target.reset();
}
// const { register, errors, required ,handleSubmit } = useForm();
const [loading,setLoading] = useState(false)
const [faqName,setFaqName] = useState('')
const myChangeHandler1 = (event) => {
setFaqName(event.target.value);
}
return(
<div className="section-form" >
<form name="frm"
method="post"
onSubmit={mySubmitHandler}
>
<div className="form-elements">
<div className="pure-group ">
<label className="pure-group-label">La FAQ en question </label>
<input name="FAQ en question" className="pure-group-text"
type="text" onChange={myChangeHandler1}
/>
</div>
</div>
<input type='submit' />
</form>
</div>
)
}
The Google Apps Script:
var SCRIPT_PROP = PropertiesService.getScriptProperties(); // new property service
// If you don't want to expose either GET or POST methods you can comment out the appropriate function
function doGet(e){
return handleResponse(e);
}
function doPost(e){
//return handleResponse(e);
}
function handleResponse(e) {
// shortly after my original solution Google announced the LockService[1]
// this prevents concurrent access from overwriting data
// [1] http://googleappsdeveloper.blogspot.co.uk/2011/10/concurrency-and-google-apps-script.html
// we want a public lock, one that locks for all invocations
var lock = LockService.getPublicLock();
lock.waitLock(30000); // wait 30 seconds before conceding defeat.
try {
// next set where we write the data - you could write to multiple/alternate destinations
var doc = SpreadsheetApp.openById(SCRIPT_PROP.getProperty("key"));
var sheet = doc.getSheetByName(SHEET_NAME);
// we'll assume header is in row 1 but you can override with header_row in GET/POST data
var headRow = e.parameter.header_row || 1;
var headers = sheet.getRange(1, 1, 1, sheet.getLastColumn()).getValues()[0];
var nextRow = sheet.getLastRow()+1; // get next row
var row = [];
// loop through the header columns
for (i in headers){
if (headers[i] == "Timestamp"){ // special case if you include a 'Timestamp(Date)' column
row.push(new Date());
} else { // else use header name to get data
row.push(e.parameter[headers[i]]);
}
}
// more efficient to set values as [][] array than individually
sheet.getRange(nextRow, 1, 1, row.length).setValues([row]);
var callback = e.parameter.callback;
// return json success results
// return ContentService
// .createTextOutput(JSON.stringify({"result":"success", "row": nextRow}))
// .setMimeType(ContentService.MimeType.JSON);
return ContentService.createTextOutput(callback+'('+ JSON.stringify({"result":"success", "row": nextRow})+')').setMimeType(ContentService.MimeType.JAVASCRIPT);
} catch(error){
// if error return this
//return ContentService
// .createTextOutput(JSON.stringify({"result":"error", "error": e}))
// .setMimeType(ContentService.MimeType.JSON);
var callback = e.parameter.callback;
return ContentService.createTextOutput(callback+'('+ JSON.stringify({"result":"error", "error": error})+')').setMimeType(ContentService.MimeType.JAVASCRIPT);
} finally { //release lock
lock.releaseLock();
}
}
function setup() {
var doc = SpreadsheetApp.getActiveSpreadsheet();
SCRIPT_PROP.setProperty("key", doc.getId());
}

I would like to propose the following 2 patterns.
Modification points:
At the current stage, there is no getPublicLock().
In your shared Spreadsheet there is only one sheet, named Sheet2, but the script uses var SHEET_NAME = "Feedback";, which does not match any sheet name. Because of this, var sheet = doc.getSheetByName(SHEET_NAME); is null and an error occurs at var headers = sheet.getRange(1, 1, 1, sheet.getLastColumn()).getValues()[0];.
With formUrl + "&Commentaires=" + encodeURIComponent(faqComment) on the React side, the endpoint becomes https://script.google.com/macros/s/###/exec&Commentaires=test?callback=jsonp_callback_###, which is not a valid query string (the first parameter needs to be introduced with ?).
Pattern 1:
In this pattern, your script is modified and JSONP is used.
React side: App.js
From:
jsonp(formUrl + "&Commentaires=" + encodeURIComponent(faqComment), data => {
// alert(data);
});
To:
jsonp(formUrl + "?Commentaires=" + encodeURIComponent(faqComment), data => {
// alert(data);
});
Google Apps Script side:
When you want to use your shared Spreadsheet, please modify as follows.
From:
var lock = LockService.getPublicLock();
To:
var lock = LockService.getDocumentLock();
And,
From:
var SHEET_NAME = "Feedback";
To:
var SHEET_NAME = "Sheet2";
Alternatively, you can rename the sheet from Sheet2 to Feedback instead of modifying the script.
Pattern 2:
In this pattern, your script is modified and fetch is used instead of JSONP, because with the script above I could sometimes reproduce a CORS-related error. So, as another pattern, I would like to propose using fetch. When fetch is used, I could confirm that no CORS-related error occurs.
React side: App.js
From:
export default function FrmTable() {
const jsonp = (url, callback) => {
var callbackName = "jsonp_callback_" + Math.round(100000 * Math.random());
window[callbackName] = function(data) {
alert("Formulaire envoyé ");
delete window[callbackName];
document.body.removeChild(script);
callback(data);
};
var script = document.createElement("script");
script.src =
url + (url.indexOf("?") >= 0 ? "&" : "?") + "callback=" + callbackName;
document.body.appendChild(script);
};
const mySubmitHandler = event => {
event.preventDefault();
/* const request = new XMLHttpRequest();
const formData = new FormData();
formData.append("La FAQ en question", form.faqName);
formData.append("Suggestion contenu pdf", form.faqSuggest);
formData.append("Commentaires", form.faqComment);
request.open("POST", formUrl);
request.send(formData); */
jsonp(formUrl + "&Commentaires=" + encodeURIComponent(faqComment), data => {
// alert(data);
});
event.target.reset();
};
To:
export default function FrmTable() {
const mySubmitHandler = event => {
event.preventDefault();
fetch(formUrl + "?Commentaires=" + encodeURIComponent(faqComment))
.then(res => res.text())
.then(res => console.log(res))
.catch(err => console.error(err));
event.target.reset();
};
In this case, res.json() can also be used instead of res.text().
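Since the original goal was to show a simple confirmation popup after submitting, here is a rough sketch of how the fetch call above could parse the JSON returned by the Web App and surface a message. The alert text and the use of the component's existing loading state are illustrative assumptions, not part of the answer:
const mySubmitHandler = event => {
  event.preventDefault();
  setLoading(true); // assumes the existing [loading, setLoading] state from the component
  fetch(formUrl + "?Commentaires=" + encodeURIComponent(faqComment))
    .then(res => res.json())
    .then(data => {
      // data is the object returned by the Apps Script, e.g. { result: "success", row: 5 }
      if (data.result === "success") {
        alert("Form submitted (row " + data.row + ")"); // placeholder message
      } else {
        alert("Submission failed: " + JSON.stringify(data.error));
      }
    })
    .catch(err => console.error(err))
    .finally(() => setLoading(false));
  event.target.reset();
};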
Google Apps Script side:
When you want to use your shared Spreadsheet, please modify as follows.
From:
var lock = LockService.getPublicLock();
To:
var lock = LockService.getDocumentLock();
And,
From:
var callback = e.parameter.callback;
return ContentService.createTextOutput(callback+'('+ JSON.stringify({"result":"success", "row": nextRow})+')').setMimeType(ContentService.MimeType.JAVASCRIPT);
} catch(error){
var callback = e.parameter.callback;
return ContentService.createTextOutput(callback+'('+ JSON.stringify({"result":"error", "error": error})+')').setMimeType(ContentService.MimeType.JAVASCRIPT);
} finally { //release lock
lock.releaseLock();
}
To:
return ContentService.createTextOutput(JSON.stringify({"result":"success", "row": nextRow})).setMimeType(ContentService.MimeType.JSON);
} catch(error){
return ContentService.createTextOutput(JSON.stringify({"result":"error", "error": error})).setMimeType(ContentService.MimeType.JSON);
} finally { //release lock
lock.releaseLock();
}
And,
From:
var SHEET_NAME = "Feedback";
To:
var SHEET_NAME = "Sheet2";
Alternatively, you can rename the sheet from Sheet2 to Feedback instead of modifying the script.
Note:
Whenever you modify the script of the Web App, please redeploy the Web App as a new version. This is how the latest script is reflected in the Web App. Please be careful about this.
References:
Web Apps
Taking advantage of Web Apps with Google Apps Script
Class LockService

Related

Post a message from iframe in React

I'm having trouble sending a message from a cross-domain iframe in React. I've read many articles, but most of them are about sending a message to an iframe.
The issue is that no error message is shown in the page that embeds the iframe, but when I go to look at the page that I embed, it does show an error message:
Scene.js:230 Failed to execute 'postMessage' on 'DOMWindow': The target origin provided ('https://thewebsite.com') does not match the recipient window's origin ('https://mywebsite').
so I can't tell whether the message has been sent successfully or not.
Here is my code:
confirm = () => {
const { homeId, correctData } = this.state
const form = new FormData();
//process data
form.append('scene_id', homeId)
form.append('material_id', correctData[0].id)
form.append('material_img', correctData[0].component_img)
const obj = JSON.parse(JSON.stringify(form));
//
//way 1
parent.postMessage(obj, '*')
//way 2
parent.postMessage(obj, 'https://www.thewebsite.com/pro_wall.html')
//way 3
window.frames.postMessage(obj, '*')
//way 4
window.top.postMessage(obj, '*')
//way 5
const targetWindow = window.open('https://www.thewebsite.com/pro_wall.html')
setTimeout(() => {
targetWindow?.postMessage(obj, '*')
}, 3000)
}
Sorry for writing so many ways to post the message; I just want to make sure I've tried every possibility.
After a few tries, I got positive feedback from the client: they received the data. This is the code I eventually wrote.
confirm = () => {
const { homeId, correctData } = this.state
const formData = new FormData();
formData.append('scene_id', homeId)
formData.append('material_id', correctData[0]?.id)
formData.append('material_img', correctData[0]?.component_img)
var object = {};
formData.forEach((value, key) => {object[key] = value});
var json = JSON.stringify(object);
parent.postMessage(json, `https://www.thewebsite.com/pro_wall.html`)
}
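As a side note: JSON.stringify on a FormData object produces "{}" because its entries are not enumerable properties, which is why the earlier attempts posted an empty object. Since the three values are already at hand, the FormData round trip can be skipped entirely; a minimal sketch based on the code above (key names and target URL unchanged):
confirm = () => {
  const { homeId, correctData } = this.state;
  // Build a plain object directly instead of going through FormData.
  const payload = {
    scene_id: homeId,
    material_id: correctData[0]?.id,
    material_img: correctData[0]?.component_img,
  };
  parent.postMessage(JSON.stringify(payload), 'https://www.thewebsite.com/pro_wall.html');
};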
I also saw the code on the client's side through the web dev tools; it looks like this:
<script>
window.addEventListener("message", receivewall, false);
function receivewall(event){
var origin = event.origin;
if(origin == 'https://mywebsite'){
var params = JSON.parse(event.data);
$('#result').html($.param(params));
// console.log(params);
}
// $('#result').html(data);
}
function getQueryVariable(query) {
var vars = query.split('&');
var params = {};
for (var i = 0; i < vars.length; i++) {
var pair = vars[i].split('=');
params[decodeURIComponent(pair[0])] = decodeURIComponent(pair[1]);
}
return params;
}
</script>
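On the original concern of not being able to tell whether the message arrived: one option, purely as an illustrative sketch, is for the receiving page to post an acknowledgment back to the iframe. This assumes the client's listener shown above were extended with a reply; nothing below is part of the actual answer.
// Inside the iframe (sender): listen for an acknowledgment from the parent.
window.addEventListener('message', (event) => {
  if (event.origin !== 'https://www.thewebsite.com') return; // only trust the known parent origin
  if (event.data === 'wall-received') {
    console.log('Parent page confirmed it received the wall data');
  }
});
// On the parent page, at the end of receivewall(event):
//   event.source.postMessage('wall-received', event.origin);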

Get download URL from multiple file upload to Firebase Storage

I am new to Firebase and AngularJS, and I am having difficulty getting the download URLs from Firebase Storage and storing them in the Firebase Realtime Database.
I was able to upload multiple files to Firebase Storage. The problem is that when I store the download URLs into the Realtime Database, all the stored URL values are the same. They should differ, based on each file's downloadURL.
Here is my script:
$scope.submitPhotos = function(file){
console.log(file);
var updateAlbum = [];
for (var i = 0; i < file.length; i++) {
var storageRef=firebase.storage().ref(albumtitle).child(file[i].name);
var task=storageRef.put(file[i]);
task.on('state_changed', function progress(snapshot){
var percentage=( snapshot.bytesTransferred / snapshot.totalBytes )*100;
if (percentage==100){
storageRef.getDownloadURL().then(function(url) {
var galleryRef = firebase.database().ref('gallery/'+albumkey);
var postkey = firebase.database().ref('gallery/'+albumkey).push().key;
updateAlbum={img:url};
firebase.database().ref('gallery/'+ albumkey+'/'+postkey).update(updateAlbum);
});
};
})
};
};
As you can see, I was able to store the URL into the database, but all of the URLs are the same. What I need is for every key to store the distinct link of its own file from Storage.
Any help is appreciated. Thanks.
function uploadImg(file,i) {
return new Promise((resolve,reject)=>{
var storageRef=firebase.storage().ref("store-images/"+file[i].file.name);
task = storageRef.put(file[i].file);
task.on('state_changed', function progress(snapshot){
var percentage=( snapshot.bytesTransferred / snapshot.totalBytes )*100;
console.log(percentage);
// use the percentage as you wish, to show progress of an upload for example
}, // use the function below for error handling
function (error) {
console.log(error);
},
function complete () //This function executes after a successful upload
{
task.snapshot.ref.getDownloadURL().then(function(downloadURL) {
resolve(downloadURL)
});
});
})
}
async function putImage(file) {
for (var i = 0; i < file.length; i++) {
var dd = await uploadImg(file,i);
firebase.database().ref().child('gallery').push(dd);
}
}
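For completeness, a possible way to wire the helper above to a file input; the element id and the { file: File } wrapping are assumptions based on how uploadImg() reads file[i].file:
// Hypothetical usage: pass the selected files from an <input type="file" multiple> control.
document.getElementById('photoInput').addEventListener('change', function (e) {
  // uploadImg() expects entries shaped like { file: File }, so wrap each selected File.
  var wrapped = Array.prototype.map.call(e.target.files, function (f) {
    return { file: f };
  });
  putImage(wrapped).catch(function (err) {
    console.error('Upload failed', err);
  });
});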
Try using the code below:
$scope.submitPhotos = function(file){
console.log(file);
var updateAlbum = [];
for (var i = 0; i < file.length; i++) {
var storageRef=firebase.storage().ref(albumtitle).child(file[i].name);
var task=storageRef.put(file[i]);
task.on('state_changed', function progress(snapshot)
{
var percentage=( snapshot.bytesTransferred / snapshot.totalBytes )*100;
// use the percentage as you wish, to show progress of an upload for example
}, // use the function below for error handling
function (error) {
switch (error.code) {
case 'storage/unauthorized':
// User doesn't have permission to access the object
break;
case 'storage/canceled':
// User canceled the upload
break;
case 'storage/unknown':
// Unknown error occurred, inspect error.serverResponse
break;
}
}, function complete () //This function executes after a successful upload
{
let dwnURL = task.snapshot.downloadURL;
let galleryRef = firebase.database().ref('gallery/'+albumkey);
let postkey = firebase.database().ref('gallery/'+albumkey).push().key;
updateAlbum={img:dwnURL};
firebase.database().ref('gallery/'+ albumkey+'/'+postkey).update(updateAlbum);
});
};
};
All the best!

Get an image of a vbhtml view as a byte array and save it to an oracle database

I need help with an MVC application in VB.NET. In general terms, I need to receive an image through the view and work with it in the controller, so that I can convert the image to a byte array and save it to an Oracle database. My idea is to get the image and convert it to a byte array in the controller, or maybe there is some way to get the image already as a byte array and pass that array to the controller to save it to the database.
Something like this is my view:
<div class="span11">
<div class="span4" id="depnac">
@Html.LabelFor(Function(m) m.DepNacPER)
@Html.DropDownListFor(Function(m) m.DepNacPER, Model.DepNacPER, New With {.class = "form-control"})
</div>
and this is my model:
<Display(Name:="Region of birth")>
<Required(ErrorMessage:="you must select a option")>
Property DepNacPER As SelectList
I'm working on an ASP.NET Core app right now that uploads images. The image comes through to the controller via the request as a Stream. I'm then creating an Image object from that Stream, but you could just read the data from it directly. That said, you might want to try creating an Image object to confirm that the data actually represents a valid image.
Here's some relevant code from the view's script:
function uploadImage()
{
// This is a file upload control in a hidden div.
var image = $("#imageFile");
if (image[0].files.length > 0)
{
var formData = new FormData();
formData.append(image[0].files[0].name, image[0].files[0]);
var xhr = new XMLHttpRequest();
xhr.open("POST", "@Url.Content("~/events/uploadimage")");
xhr.send(formData);
xhr.onreadystatechange = function ()
{
if (xhr.readyState === 4 && xhr.status === 200)
{
var response = JSON.parse(xhr.responseText);
if (response.saveSuccessful)
{
// ...
} else
{
window.location.replace("@Url.Content("~/error")");
}
}
}
xhr.onerror = function(err, result)
{
alert("Error: " + err.responseText);
}
}
}
I'm in the process of replacing that code with some jQuery that does the heavy lifting but haven't got that far yet.
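For reference, a jQuery version of the upload above might look roughly like the sketch below. It is only a sketch under the same assumptions as the original (the hidden #imageFile control and the /events/uploadimage action), not the final replacement mentioned above:
function uploadImage() {
    var files = $("#imageFile")[0].files;
    if (files.length === 0) return;

    var formData = new FormData();
    formData.append(files[0].name, files[0]);

    $.ajax({
        url: "@Url.Content("~/events/uploadimage")",
        type: "POST",
        data: formData,
        processData: false,   // leave the FormData alone
        contentType: false    // let the browser set the multipart boundary
    }).done(function (response) {
        if (!response.saveSuccessful) {
            window.location.replace("@Url.Content("~/error")");
        }
    }).fail(function (xhr) {
        alert("Error: " + xhr.responseText);
    });
}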
Here's some relevant code from the action:
[HttpPost]
public IActionResult UploadImage()
{
var requestForm = Request.Form;
StringValues tempImageFileNames;
string tempImageFileName = null;
string imageUrl = null;
var saveSuccessful = true;
var requestFiles = requestForm.Files;
if (requestFiles.Count > 0)
{
// A file has been uploaded.
var file = requestFiles[0];
using (var stream = file.OpenReadStream())
{
try
{
using (var originalImage = System.Drawing.Image.FromStream(stream))
{
// Do whatever you like with the Image here.
}
}
catch (Exception)
{
saveSuccessful = false;
}
}
}
if (saveSuccessful)
{
return Json(new {saveSuccessful, tempImageFileName, imageUrl});
}
else
{
return Json(new {saveSuccessful});
}
}
Sorry, it didn't occur to me at first that you're after VB code and this is C#. Hopefully you can still get the idea and I'll take the hit if someone dislikes the answer.

Lighthouse/Service Worker, how to return http 200 when offline

My application currently uses webpack, AngularJS, and a service worker.
I'm using the sw-precache plugin to create my service worker:
https://www.npmjs.com/package/sw-precache-webpack-plugin
The service worker caching is going well, and in Chrome DevTools I can see my static resources being served from serviceworker.js.
However, when I run the Lighthouse report I am still getting the following failure:
URL responds with a 200 when offline
https://github.com/GoogleChrome/lighthouse
In DevTools, when I switch on offline mode, I can actually see my page load. There are some errors in the console for third-party scripts failing. Is this the reason for not getting the 200 URL response - because I have some console errors from third parties? A sample error:
GET https://fonts.googleapis.com/css?family=Roboto+Slab:300,400,700 net::ERR_INTERNET_DISCONNECTED
What exactly is this audit looking for, and how can I achieve it?
Edit: I added a picture of my Network tab when I turn on offline mode; as I said, the page loads fine. I notice my sw.js gets loaded from disk cache, which I don't notice on other sites, so there could be something there.
Also, here is the sw.js content:
'use strict';
var precacheConfig = [["/css/app.styles.77e2a0c3e7ac001193566741984a07f0.css","77e2a0c3e7ac001193566741984a07f0"],["/css/vendor.styles.582e79ead0684a8fb648ce9e543ad810.css","582e79ead0684a8fb648ce9e543ad810"],["/favicon.ico","70ef569d9a12f6873e86ed57d575cf13"],["/fonts/MaterialIcons-Regular.eot","e79bfd88537def476913f3ed52f4f4b3"],["/fonts/MaterialIcons-Regular.svg","a1adea65594c502f9d9428f13ae210e1"],["/fonts/MaterialIcons-Regular.ttf","a37b0c01c0baf1888ca812cc0508f6e2"],["/fonts/MaterialIcons-Regular.woff","012cf6a10129e2275d79d6adac7f3b02"],["/fonts/MaterialIcons-Regular.woff2","570eb83859dc23dd0eec423a49e147fe"],["/icons/launcher-icon-2x.png","91896b953c39df7c40b4772100971220"],["/icons/launcher-icon-3x.png","0aee2add7f56559aeae9555e495c3881"],["/icons/launcher-icon-4x.png","b164109dd7640b14aaf076d55a0a637b"],["/images/aa_logo_only.png","b5b46a8c2ead9846df1f1d3035634310"],["/images/developer.png","e8df747b292fe6f5eb2403c7180c31da"],["/images/facebook.png","8ab42157d0974099a72e151c23073022"],["/images/home-bg.jpeg","0a0f7da8574b037463af2f1205801e56"],["/images/logo.png","e8712312e08ca427d79a9bf34aedd6fc"],["/images/map.png","af3443ef4ab2890cae371c7a3de437ed"],["/images/pattern.png","114d593511446b9a4c6e340f7fef5c84"],["/images/twitter.png","99da44949cd33e16d2d551d42559eaf2"],["/index.html","1e9b5c4b3abba7e13d8d28c98cfb3bb5"],["/js/app.d9ada27616bf469d794d.js","8e2fc74de7d5c122ab8f0aca7e31b075"],["/js/vendor.d9ada27616bf469d794d.js","3bbba4569b6f3b88881b0533260905fe"],["/manifest.json","4bea29155995b63a9f2855637c0fe74c"]];
var cacheName = 'sw-precache-v2-45-' + (self.registration ? self.registration.scope : '');
var ignoreUrlParametersMatching = [/^utm_/];
var addDirectoryIndex = function (originalUrl, index) {
var url = new URL(originalUrl);
if (url.pathname.slice(-1) === '/') {
url.pathname += index;
}
return url.toString();
};
var createCacheKey = function (originalUrl, paramName, paramValue,
dontCacheBustUrlsMatching) {
// Create a new URL object to avoid modifying originalUrl.
var url = new URL(originalUrl);
// If dontCacheBustUrlsMatching is not set, or if we don't have a match,
// then add in the extra cache-busting URL parameter.
if (!dontCacheBustUrlsMatching ||
!(url.toString().match(dontCacheBustUrlsMatching))) {
url.search += (url.search ? '&' : '') +
encodeURIComponent(paramName) + '=' + encodeURIComponent(paramValue);
}
return url.toString();
};
var isPathWhitelisted = function (whitelist, absoluteUrlString) {
// If the whitelist is empty, then consider all URLs to be whitelisted.
if (whitelist.length === 0) {
return true;
}
// Otherwise compare each path regex to the path of the URL passed in.
var path = (new URL(absoluteUrlString)).pathname;
return whitelist.some(function(whitelistedPathRegex) {
return path.match(whitelistedPathRegex);
});
};
var stripIgnoredUrlParameters = function (originalUrl,
ignoreUrlParametersMatching) {
var url = new URL(originalUrl);
url.search = url.search.slice(1) // Exclude initial '?'
.split('&') // Split into an array of 'key=value' strings
.map(function(kv) {
return kv.split('='); // Split each 'key=value' string into a [key, value] array
})
.filter(function(kv) {
return ignoreUrlParametersMatching.every(function(ignoredRegex) {
return !ignoredRegex.test(kv[0]); // Return true iff the key doesn't match any of the regexes.
});
})
.map(function(kv) {
return kv.join('='); // Join each [key, value] array into a 'key=value' string
})
.join('&'); // Join the array of 'key=value' strings into a string with '&' in between each
return url.toString();
};
var hashParamName = '_sw-precache';
var urlsToCacheKeys = new Map(
precacheConfig.map(function(item) {
var relativeUrl = item[0];
var hash = item[1];
var absoluteUrl = new URL(relativeUrl, self.location);
var cacheKey = createCacheKey(absoluteUrl, hashParamName, hash, false);
return [absoluteUrl.toString(), cacheKey];
})
);
function setOfCachedUrls(cache) {
return cache.keys().then(function(requests) {
return requests.map(function(request) {
return request.url;
});
}).then(function(urls) {
return new Set(urls);
});
}
self.addEventListener('install', function(event) {
event.waitUntil(
caches.open(cacheName).then(function(cache) {
return setOfCachedUrls(cache).then(function(cachedUrls) {
return Promise.all(
Array.from(urlsToCacheKeys.values()).map(function(cacheKey) {
// If we don't have a key matching url in the cache already, add it.
if (!cachedUrls.has(cacheKey)) {
return cache.add(new Request(cacheKey, {credentials: 'same-origin'}));
}
})
);
});
}).then(function() {
// Force the SW to transition from installing -> active state
return self.skipWaiting();
})
);
});
self.addEventListener('activate', function(event) {
var setOfExpectedUrls = new Set(urlsToCacheKeys.values());
event.waitUntil(
caches.open(cacheName).then(function(cache) {
return cache.keys().then(function(existingRequests) {
return Promise.all(
existingRequests.map(function(existingRequest) {
if (!setOfExpectedUrls.has(existingRequest.url)) {
return cache.delete(existingRequest);
}
})
);
});
}).then(function() {
return self.clients.claim();
})
);
});
self.addEventListener('fetch', function(event) {
if (event.request.method === 'GET') {
// Should we call event.respondWith() inside this fetch event handler?
// This needs to be determined synchronously, which will give other fetch
// handlers a chance to handle the request if need be.
var shouldRespond;
// First, remove all the ignored parameter and see if we have that URL
// in our cache. If so, great! shouldRespond will be true.
var url = stripIgnoredUrlParameters(event.request.url, ignoreUrlParametersMatching);
shouldRespond = urlsToCacheKeys.has(url);
// If shouldRespond is false, check again, this time with 'index.html'
// (or whatever the directoryIndex option is set to) at the end.
var directoryIndex = 'index.html';
if (!shouldRespond && directoryIndex) {
url = addDirectoryIndex(url, directoryIndex);
shouldRespond = urlsToCacheKeys.has(url);
}
// If shouldRespond is still false, check to see if this is a navigation
// request, and if so, whether the URL matches navigateFallbackWhitelist.
var navigateFallback = '';
if (!shouldRespond &&
navigateFallback &&
(event.request.mode === 'navigate') &&
isPathWhitelisted([], event.request.url)) {
url = new URL(navigateFallback, self.location).toString();
shouldRespond = urlsToCacheKeys.has(url);
}
// If shouldRespond was set to true at any point, then call
// event.respondWith(), using the appropriate cache key.
if (shouldRespond) {
event.respondWith(
caches.open(cacheName).then(function(cache) {
return cache.match(urlsToCacheKeys.get(url)).then(function(response) {
if (response) {
return response;
}
throw Error('The cached response that was expected is missing.');
});
}).catch(function(e) {
// Fall back to just fetch()ing the request if some unexpected error
// prevented the cached response from being valid.
console.warn('Couldn\'t serve response for "%s" from cache: %O', event.request.url, e);
return fetch(event.request);
})
);
}
}
});
Some resources, such as
https://fonts.googleapis.com/css?family=Roboto+Slab:300,400,700
do not support offline mode. Download these files manually and serve them again from a local path.
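Alternatively, rather than self-hosting the fonts, the service worker can be told to cache them at runtime. sw-precache (and therefore sw-precache-webpack-plugin) accepts a runtimeCaching option for cross-origin resources like this; a sketch of the webpack config, assuming the plugin is already part of your build:
// webpack.config.js (sketch)
var SWPrecacheWebpackPlugin = require('sw-precache-webpack-plugin');

module.exports = {
  // ...existing entry/output/loaders...
  plugins: [
    new SWPrecacheWebpackPlugin({
      filename: 'sw.js',
      runtimeCaching: [{
        // Cache the Google Fonts CSS and font files after first use,
        // so they can still be served when offline.
        urlPattern: /^https:\/\/fonts\.(googleapis|gstatic)\.com\//,
        handler: 'cacheFirst'
      }]
    })
  ]
};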

Why can't I write a second file to GridFS without refreshing the template view in a MEAN app?

I am having a problem writing a second audio file to GridFS. I am using ng-file-upload to pick a file, pass the file/binary data to GridFS, and write it to the database. The first file works fine; however, unless I load a different view first and then go back to my audio view/template page, I get this error every time:
TypeError: path must be a string
at TypeError (native)
at Object.fs.open (fs.js:540:11)
at ReadStream.open (fs.js:1673:6)
at new ReadStream (fs.js:1660:10)
at Object.fs.createReadStream (fs.js:1608:10)
at NativeConnection.<anonymous> (Z:\techFilez\webApp\MyPiCloud\routes\IO\writeFile.js:42:29)
at NativeConnection.g (events.js:260:16)
at emitNone (events.js:67:13)
at NativeConnection.emit (events.js:166:7)
at open (Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\connection.js:518:11)
at NativeConnection.Connection.onOpen (Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\connection.js:527:5)
at Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\connection.js:487:11
at Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\drivers\node-mongodb-native\connection.js:71:5
at Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\node_modules\mongodb\lib\db.js:232:5
at connectHandler (Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\node_modules\mongodb\lib\server.js:333:7)
at g (events.js:260:16)
Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\node_modules\mongodb\lib\server.js:336
process.nextTick(function() { throw err; })
^
TypeError: path must be a string
at TypeError (native)
at Object.fs.open (fs.js:540:11)
at ReadStream.open (fs.js:1673:6)
at new ReadStream (fs.js:1660:10)
at Object.fs.createReadStream (fs.js:1608:10)
at NativeConnection.<anonymous> (Z:\techFilez\webApp\MyPiCloud\routes\IO\writeFile.js:42:29)
at NativeConnection.g (events.js:260:16)
at emitNone (events.js:67:13)
at NativeConnection.emit (events.js:166:7)
at open (Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\connection.js:518:11)
at NativeConnection.Connection.onOpen (Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\connection.js:527:5)
at Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\connection.js:487:11
at Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\lib\drivers\node-mongodb-native\connection.js:71:5
at Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\node_modules\mongodb\lib\db.js:232:5
at connectHandler (Z:\techFilez\webApp\MyPiCloud\node_modules\mongoose\node_modules\mongodb\lib\server.js:333:7)
at g (events.js:260:16)
[nodemon] app crashed - waiting for file changes before starting...
I have done several things:
I am making sure to call fs.unlink(myFile) to delete the file from the filesystem, as well as to close the mongoose connection after every successful file write.
I am using the variable var myFile = file.path (which is the file path from ng-file-upload); after the successful write I have tried setting myFile to null, to no avail.
It works when I switch views or refresh the page, but since I'm going for the single-page-app approach I would like to avoid this.
Here is my code on the server side:
/**
* Created by foolishklown on 8/27/2016.
*/
var mongoose = require('mongoose'),
path = require('path'),
Grid = require('gridfs-stream'),
fs = require('fs'),
User = require('../../models/user');
module.exports = function(file, userId, fileType, res) {
var fileId;
//console.log('called the write file for gridfs'.green);
//console.log('file is: ', file);
var conn = mongoose.createConnection('mongodb://localhost/media', (error) => {
if(error) {
console.error('Error connecting to mongod media instance'.red);
process.exit(1);
} else {
console.info('Connected successfully to mongod media instance in the write file!'.blue);
}
});
// The following line is designating a file to grab/read, and save into mongo
// in our case it will be something from ng-file-upload that the user wants to upload
var myFile = file.path;
// Connect gridFs and mongo
Grid.mongo = mongoose.mongo;
conn.once('open', function () {
console.log('connection open, ready for I/O!');
var gfs = Grid(conn.db);
// This write stream is how we'll write to mongo
var writeStream = gfs.createWriteStream({
// Name the file the way you want it stored in mongo
filename: file.originalFilename,
type: fileType
});
// Create a read stream so that we can read its data, and then we'll use the
// write stream to write to the DB by piping into the write stream
var readStream = fs.createReadStream(myFile)
.on('end', () => {
writeToUserDb(userId, fileType, readStream.id);
res.status(200).send({id: readStream.id, type: fileType, user: userId});
})
.on('error', () => {
res.status(500).send('error in writing with gridfs');
})
.pipe(writeStream);
//fs.createReadStream(myFile).pipe(writeStream);
writeStream.on('close', function (file) {
console.log(file.filename + 'written to DB');
/**
setTimeout(1000, () => {
fs.unlink(myFile);
});
*/
fs.unlink(myFile);
myFile = null;
conn.close();
});
});
function writeToUserDb(uid, type, fileId) {
var userConn = mongoose.createConnection('mongodb://localhost/mean-auth', (error) => {
if(error) {
console.error('Error connecting to the mean-auth instance'.red);
process.exit(1);
} else {
console.info('Connected to the mean-auth instance!'.blue);
console.info('Attempting to find user: ' + uid + ', filetype: ' + type + ', streamID: ' + fileId + '!'.green);
User.findById(uid, (err, doc) => {
if(err) {
console.error('Error finding user with id: ', uid);
process.exit(1);
} else {
console.log('original doc: ', doc);
doc.addMedia(type, fileId);
doc.save();
console.log('new doc: ', doc);
}
})
}
});
userConn.close();
}
};
Alas, it was not an issue on the back end at all; it was an Angular issue with ng-file-upload.
My HTML:
<div class="row">
<div class="col-lg-10">
<h1>MyPi Audio</h1>
</div>
<div class="col-lg-2">
<form name="form">
<input type="file" id="file1" name="file" ng-files="getTheFiles($files)"
ng-keep="false"
accept="'audio/*'">
<input type="button" class="btn btn-default" ng-click="uploadFiles('/dashboard/uploadAudio', 'audio', myCont.currentUser)" value="Upload">
</form>
</div>
</div>
<div class="row">
<div ng-repeat="audio in myCont.media.audio" style="width:100%;background-color:#999999;border:1px solid deeppink;">
<img ng-src="{{audio.url}}" />
<span>{{audio.name}}</span>
</div>
</div>
Inside the controller, the form data being built was not being reset.
The old code:
var formdata = new FormData();
$scope.getTheFiles = function ($files) {
angular.forEach($files, function (value, key) {
//console.log('key: ' + key + ', val: ' + $files[key]);
formdata.append(key, value);
});
};
After the response was sent back from the server, I simply called $scope.getTheFiles(null) and added a check for the null condition in the function, which resets the form data:
var formdata = new FormData();
$scope.getTheFiles = function ($files) {
if($files == null) {
formdata = new FormData();
} else {
angular.forEach($files, function (value, key) {
//console.log('key: ' + key + ', val: ' + $files[key]);
formdata.append(key, value);
});
}
};
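The uploadFiles handler referenced in the template is not shown above; the sketch below is a plausible shape for it (the $http FormData pattern is standard AngularJS, while the extra field names are assumptions), including the call that resets the form data once the server responds:
$scope.uploadFiles = function (url, fileType, userId) {
    // Append any extra fields the server route expects (assumed names).
    formdata.append('fileType', fileType);
    formdata.append('userId', userId);

    $http.post(url, formdata, {
        transformRequest: angular.identity,    // send the FormData untouched
        headers: { 'Content-Type': undefined } // let the browser set the multipart boundary
    }).then(function (response) {
        console.log('upload ok', response.data);
        $scope.getTheFiles(null);              // reset the FormData for the next upload
    }, function (err) {
        console.error('upload failed', err);
    });
};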
I answered my own question in case anyone else has the same issue in the future.
