So I'm allowing users to upload certain images to Firebase Storage, and each file is stored under a folder named after their user ID. When a user uploads an image, I get a URL for that file:
https://firebasestorage.googleapis.com/v0/b/jobify-a7a4a.appspot.com/o/CZx5biMydzQAOx4T8WXJU5jt2852%2Fimages%2Fdreamy-waterfall-4k-sc.jpg?alt=media&token=8e4d60ed-e956-4f71-80d1-58755fe94dbe
When a user replaces the uploaded image, I'd like to delete the previously uploaded image from storage.
I've seen how it's done in the docs, but that requires the file name, which I don't have access to.
This is the code that runs when a user uploads an image:
function uploadImg(e) {
  const file = e.target.files[0];
  if (file && file.size < 30000000) {
    var storageRef = firebase
      .storage()
      .ref(`${user.uid}/images`)
      .child(file.name);
    const task = storageRef.put(file);
    task.on(
      "state_changed", // note: the upload task event is "state_changed", not "state_changes"
      function progress(snap) {
        setLoading(true);
        const percentage = (snap.bytesTransferred / snap.totalBytes) * 100;
        loadingref.current.style.height = percentage + "%";
      },
      function error() {
        setNotifibool(true);
        setNotifi({
          text: "Try Again!",
          icon: "fal fa-exclamation-circle"
        });
      },
      function complete() {
        setLoading(false);
        storageRef.getDownloadURL().then((url) => {
          setState(url);
        });
        setNotifi({
          text: "Image Uploaded!",
          icon: "fal fa-check-circle"
        });
      }
    );
  } else {
    window.alert("too big");
  }
}
You can translate the download URL you have back into a reference by calling firebase.storage().refFromURL(downloadUrl). Then you can delete the file by calling delete() on that reference.
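A minimal sketch of that (Firebase v8 syntax to match the question's code; the function name and logging are illustrative):

function deletePreviousImage(downloadUrl) {
  // Turn the stored download URL back into a storage reference, then delete it.
  const ref = firebase.storage().refFromURL(downloadUrl);
  return ref
    .delete()
    .then(() => console.log("previous image deleted"))
    .catch((err) => console.error("delete failed", err));
}

You could call this with the URL you saved in state before replacing it with the new upload's URL.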
I am working on an application in which I create offline PDFs and save them to the file system.
The problem is that when I delete a particular record, I also need to delete its PDF from the file system. I went through the File plugin but couldn't find any method for that. I am using Ionic 4; here is a piece of the code:
if (this.plt.is('cordova')) {
  this.pdfObj.getBuffer((buffer) => {
    const blob = new Blob([buffer], { type: 'application/pdf' });
    // Save the PDF to the data directory of our app
    this.file.writeFile(this.file.externalRootDirectory + '/Downloads/', 'ACCSYS-' +
      this.randomString(4) + '-' + encodeURI(this.headerData.title) + '.pdf', blob, { replace: true }).then(fileEntry => {
        // Open the PDF with the correct OS tools
        setTimeout(() => {
          this.hideLoader();
          this.fileOpener.open(fileEntry.nativeURL, 'application/pdf');
          this.pdfObj = null;
        }, 1000);
      });
  });
} else {
  setTimeout(() => {
    this.hideLoader();
    this.pdfObj.download();
    this.pdfObj = null;
  }, 500);
}
Assume I store the nativeURL in localStorage.
Any idea how to delete the file?
If you already have a fileEntry object, you can use the remove() method to delete the file like this:
fileEntry.remove(function () {
  // the file has been successfully removed
}, function (error) {
  // there was an error removing the file
}, function () {
  // the file does not exist
});
See these links for more documentation and examples.
Here is another way to do it, by resolving the stored file URL directly (declare window at the top of the .ts file):
delete() {
  // this.fileHelper.removeFile();
  const fileToRemove = this.remoteURL; // change this to your file path
  window.resolveLocalFileSystemURL(fileToRemove, (fileEntry) => {
    fileEntry.remove(this.successHandler, this.errorHandler);
  });
}
successHandler() {
  console.log('File deleted successfully');
}
errorHandler() {
  console.log('There was an error while deleting the file');
}
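For reference, "declare window at the top of the .ts file" usually means shadowing the global so the TypeScript compiler accepts the plugin-injected API; a one-line sketch (assuming the Cordova File plugin is installed, which provides resolveLocalFileSystemURL at runtime):

// At the top of the .ts file (a module): shadow the global window so TS compiles.
declare let window: any;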
I want to retrieve a list of images from Amazon S3 in one go, based on image URLs.
Currently I am able to fetch a single image using the following code:
AWS.config.update({
  accessKeyId: accessKeyId,
  secretAccessKey: secretAccessKey
});
AWS.config.region = region;
var bucketInstance = new AWS.S3();
var params = {
  Bucket: bucketName,
  Key: awsImgUrl
};
bucketInstance.getObject(params, function (err, file) {
  if (file) {
    var dataSrc = "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
    callbackSuccess(dataSrc);
  } else {
    callbackSuccess("Error");
  }
});
EncodeData = function (data) {
  var str = data.reduce(function (a, b) { return a + String.fromCharCode(b); }, '');
  return btoa(str).replace(/.{76}(?=.)/g, '$&\n');
};
In my scenario I have multiple S3 image URLs, like awsImgUrl1, awsImgUrl2, ..., awsImgUrln.
How can I fetch them in one go instead of one by one?
You cannot get more than one image per API call with S3. You can, however, make multiple calls in parallel.
Using promises, this is straightforward:
var bucketInstance = new AWS.S3();
var imageKeys = [awsImgUrl1, awsImgUrl2, awsImgUrl3];
var promisesOfS3Objects = imageKeys.map(function (key) {
  return bucketInstance.getObject({
    Bucket: bucketName,
    Key: key
  }).promise()
    .then(function (file) {
      return "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
    });
});
Promise.all(promisesOfS3Objects)
  .then(callbackSuccess) // callbackSuccess is called with an array of strings
  .catch(function () { callbackSuccess("Error"); });
You can also change the way you upload the image data: instead of uploading each image separately, upload one document containing the data for multiple images.
const addImageBlock = () => {
  var photoBlock = [
    {
      imageId: 'id',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    },
    {
      imageId: 'id2',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    },
    {
      imageId: 'id3',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    },
    {
      imageId: 'id4',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    }
    // ...etc
  ];
  s3.upload({
    Bucket: bucketName, // the upload call also needs the bucket
    Key: photoBlockId + '.json',
    Body: JSON.stringify(photoBlock), // the body must be a string/Buffer, not a raw array
    ACL: 'public-read'
  }, function (err, data) {
    if (err) {
      return alert('There was an error: ' + err.message);
    }
  });
};
Then when you receive this data with one S3 call, you can loop through it and render the images on the frontend:
getObject(params, function (err, file) {
  var imageArr = [];
  if (file) {
    // file.Body holds the JSON document; don't shadow the loop variable with "image"
    JSON.parse(file.Body.toString()).forEach((imageData) => {
      var image = new Image();
      image.src = imageData.body;
      imageArr.push(image);
    });
    callbackSuccess(imageArr);
  } else {
    callbackSuccess("Error");
  }
});
The AWS SDK does not have any method to read multiple files at once, and likewise the console does not let you download multiple files at once.
It only has the GetObject method, which reads a single object in a bucket by its key.
So in your case you have to read them one by one by key name, if you already have the key names as a list.
If you want that list, you can get a summary of the objects in the bucket and then loop over it to download all the files, as in the sketch below.
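A minimal sketch of that list-then-loop approach (AWS SDK for JavaScript v2, reusing bucketInstance, bucketName, EncodeData and callbackSuccess from the question; the 'images/' prefix is an assumption):

// List keys under an assumed prefix, then fetch each object in a loop.
bucketInstance.listObjectsV2({ Bucket: bucketName, Prefix: 'images/' }, function (err, data) {
  if (err) return callbackSuccess("Error");
  data.Contents.forEach(function (obj) {
    bucketInstance.getObject({ Bucket: bucketName, Key: obj.Key }, function (err, file) {
      if (file) {
        callbackSuccess("data:" + file.ContentType + ";base64," + EncodeData(file.Body));
      }
    });
  });
});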
I have an image collection with the following schema:
{
  "productId": { type: String },
  "imagePaths": { type: Array }
}
I want to upload multiple images and store the corresponding image paths in the database.
I searched and found that Multer can perform this action, but I am unable to upload and save the image paths for multiple images.
I tried this Angular code:
$scope.demoMethod = function () {
  console.log('Method fired.');
  var product = $scope.product;
  $http.post('/demo/upload', $scope.product)
    .then(function (res) {
      console.log('Form Data ::' + $scope.product);
    });
};
But I am getting the error: cannot read property 'forEach' of undefined.
First you need to define the disk storage, the place where your files should be saved:
const multer = require('multer');
const mime = require('mime-types'); // assuming the mime-types package, which has mime.extension()
const { v4: uuid } = require('uuid'); // assuming uuid v4

const storage = multer.diskStorage({
  destination: (req, file, cb) => {
    cb(null, `${global.uploadDir}/temp`); // folder path
  },
  filename: (req, file, cb) => {
    cb(null, `${uuid()}.${mime.extension(file.mimetype)}`); // file name here
  }
});
const upload = multer({ storage }); // keep "multer" as the module, "upload" as the instance
Then add it to your route as middleware:
router.post('/upload', upload.any(), (req, res) => {
  console.log(req.files); // should be the array of files you uploaded
  const imagePaths = [];
  req.files.forEach(f => {
    imagePaths.push(f.path);
  });
  const product = new Product(); // model name
  product.imagePaths = imagePaths;
  ...
  product.save();
  ...
});
You can also specify a file filter:
const fileFilter = (req, file, cb) => {
  const isAllowed = utils.isAllowed(mime.extension(file.mimetype)); // your validation condition here
  if (!isAllowed) {
    return cb(new Error('This file is not allowed'));
  }
  cb(null, true);
};
const upload = multer({ storage, fileFilter });
and a size limit:
const limits = { fileSize: 50 * 1024 * 1024 /* 50 MB */ };
const upload = multer({ storage, limits });
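Note that the client must actually send multipart/form-data, otherwise req.files is undefined on the server, which is exactly what produces the question's "cannot read property 'forEach' of undefined" error. A minimal sketch of the AngularJS side (the $scope.files name and the field name are illustrative; upload.any() on the server accepts any field name):

$scope.demoMethod = function () {
  var fd = new FormData();
  // Append every selected file to the multipart body.
  $scope.files.forEach(function (file) {
    fd.append('images', file);
  });
  $http.post('/demo/upload', fd, {
    transformRequest: angular.identity,    // don't JSON-serialize the FormData
    headers: { 'Content-Type': undefined } // let the browser set the multipart boundary
  }).then(function (res) {
    console.log('Uploaded:', res.data);
  });
};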
Other than the workaround of calling fetch multiple times for multiple image file uploads (looping through the files) on the frontend, how can I upload multiple image files by calling fetch/upload just once? Could someone provide a simple example? Just like we do on Facebook.
Thanks in advance!
Update: I am done with the looping logic on the frontend. Now, since there is a loader on every image being uploaded, the percent uploaded is being calculated as a single value across all images. How do I split this value out for each image separately?
Looping logic:
for (let i = 0; i < e.target.files.length; i++) { // note: < rather than <=, or the last iteration reads undefined
  let reader = new FileReader();
  let file = e.target.files[i];
  var self = this;
  reader.onloadstart = () => {
    self.setState({ ImageUploader: true });
  };
  reader.onloadend = () => {
    var data = reader.result;
    if (!file.type.includes('image')) {
      alert('PLEASE CHOOSE AN IMAGE BRAH!');
    } else if (file.size / (1024 * 1024) > 5) {
      alert('PLEASE CHOOSE A SMALLER IMAGE');
    } else {
      var url = 'https://api......';
      var ifd = new FormData();
      ifd.append('file', file);
      axios({
        url: url,
        method: 'put',
        onUploadProgress: function (progressEvent) {
          var percentCompleted = Math.round((progressEvent.loaded * 100) / progressEvent.total);
          self.setState({ Completed: percentCompleted });
        },
        withCredentials: true,
        data: ifd
      }).then((res) => {
        this.setState({ ImageUploader: false });
        this.setState({
          image_id: this.state.image_id.concat(res.data.reason.image_id)
        });
      });
      this.setState({
        file: file,
        imagePreviewUrl: this.state.imagePreviewUrl.concat(reader.result),
        noImage: false,
        ImageChoosen: true
      });
    }
  };
  reader.readAsDataURL(file);
}
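To split the progress per image, one option is to keep one progress entry per file index instead of a single Completed value; a sketch under that assumption (Completed becomes an object keyed by loop index; the names are illustrative):

// Inside the loop, replace the single-value progress update with a per-index one.
onUploadProgress: function (progressEvent) {
  var percent = Math.round((progressEvent.loaded * 100) / progressEvent.total);
  self.setState(function (prev) {
    var completed = Object.assign({}, prev.Completed);
    completed[i] = percent; // i is this file's loop index, captured by let
    return { Completed: completed };
  });
}

Each per-image loader can then read this.state.Completed[i] for its own bar.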
My Angular 1 application saves files to S3 and allows for a wide variety of file types.
When I retrieve the objects I use the following code:
export function show(req, res) {
  const s3 = new aws.S3();
  const s3Params = {
    Bucket: S3_BUCKET,
    Key: req.query.key + ''
  };
  res.attachment(req.query.key + '');
  var fileStream = s3.getObject(s3Params).createReadStream();
  fileStream.pipe(res);
}
I would like to open the retrieved file on the client in a new window (just like in the AWS console), but I can't figure out how to go about it.
For example, this on the client side does not work at all:
.then(
  (data) => {
    var file = new Blob([data], { type: 'application/pdf' });
    var fileURL = URL.createObjectURL(file);
    window.open(fileURL);
  }
)
I really don't understand how data streams work.
If you don't have to download the PDF, you may be able to open it directly from S3, provided the object is publicly readable. In that case its URL can be built from the bucket and key:
https://<bucket>.s3.amazonaws.com/<key>
So you need code like:
export function show(req, res) {
  // Only works if the object is publicly readable in the bucket.
  const resourceUrl = `https://${S3_BUCKET}.s3.amazonaws.com/${req.query.key}`;
  // Send the URL back; the client can then open it in a new tab with window.open().
  res.json({ url: resourceUrl });
}
I'm sorry, I can't test it right now, but try. It should work.
All I had to do was get a signed URL for the resource; this is much simpler than what I was trying to do.
export function show(req, res) {
  const s3 = new aws.S3();
  const s3Params = {
    Bucket: S3_BUCKET,
    Key: req.query.key + ''
  };
  s3.getSignedUrl('getObject', s3Params, (err, data) => {
    if (err) {
      console.log(err);
      return res.end();
    }
    const returnData = {
      signedRequest: data,
    };
    res.write(JSON.stringify(returnData));
    res.end();
  });
}
and on the client all I have to do is open the link in a new tab (the request URL below is illustrative):
openDoc(doc) {
  // The endpoint path and query parameter are illustrative; use your own route
  // that returns { signedRequest } as in the server code above.
  this.$http.get('/api/attachments?key=' + doc.key)
    .then(
      (data) => {
        this.$window.open(data.data.signedRequest, '_blank');
      }
    )
    .catch(
      (err) => {
        this.Notification.error('failed to download attachment');
      }
    );
}