Other than the workaround of calling fetch once per file (looping through the files), how can I upload multiple image files from the frontend with a single fetch/upload call? Could someone provide a simple example, like the way it works on Facebook?
Thanks in advance!
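For the single-request part of the question, here is a minimal sketch (not from the thread): every selected File can be appended to the same FormData field and sent with one fetch call. The /upload endpoint and the images field name are assumptions; the backend has to be written to read multiple files from that multipart field.

// Minimal sketch: append every selected file to one FormData and send a single request.
// '/upload' and the 'images' field name are placeholders.
async function uploadAll(fileList) {
  const formData = new FormData();
  for (const file of fileList) {
    formData.append('images', file, file.name); // same field name, repeated once per file
  }
  const response = await fetch('/upload', {
    method: 'POST',
    body: formData // the browser sets the multipart/form-data boundary automatically
  });
  return response.json(); // assumes the server answers with JSON
}

// e.g. <input type="file" multiple onChange={e => uploadAll(e.target.files)} />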
Update: I am done with the looping logic on the front end. Now, since a loader is shown on every image being uploaded, the upload percentage is calculated for all images as a single value. How can I split this value so each image reports its own progress?
Looping Logic
// Note: use `<`, not `<=`, or the last iteration reads an undefined file.
for (let i = 0; i < e.target.files.length; i++) {
  const reader = new FileReader();
  const file = e.target.files[i];
  const self = this;
  reader.onloadstart = () => {
    self.setState({ ImageUploader: true });
  };
  reader.onloadend = () => {
    const data = reader.result;
    if (!file.type.includes('image')) {
      alert('Please choose an image file');
    } else if (file.size / (1024 * 1024) > 5) {
      alert('Please choose an image smaller than 5 MB');
    } else {
      const url = 'https://api......';
      const ifd = new FormData();
      ifd.append('file', file);
      axios({
        url: url,
        method: 'put',
        withCredentials: true,
        data: ifd,
        onUploadProgress: (progressEvent) => {
          const percentCompleted = Math.round((progressEvent.loaded * 100) / progressEvent.total);
          // A single Completed value is shared by every upload, which is why all
          // the loaders show the same percentage (see the sketch below).
          self.setState({ Completed: percentCompleted });
        }
      }).then((res) => {
        self.setState({ ImageUploader: false });
        self.setState({
          image_id: self.state.image_id.concat(res.data.reason.image_id)
        });
      });
      self.setState({
        file: file,
        imagePreviewUrl: self.state.imagePreviewUrl.concat(data),
        noImage: false,
        ImageChoosen: true
      });
    }
  };
  reader.readAsDataURL(file);
}
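To answer the update about splitting the progress value, here is a sketch (the uploads array in state is an assumption, not part of the original code): capture the loop index per iteration and write each file's percentage into its own slot instead of a single Completed value.

// Sketch: per-file progress keyed by the loop index. The `uploads` array in state
// is an assumption; initialize it (e.g. with zeros) before starting the uploads.
const self = this;
for (let i = 0; i < e.target.files.length; i++) {
  const file = e.target.files[i];
  const index = i; // let/const gives each iteration its own binding
  const ifd = new FormData();
  ifd.append('file', file);
  axios({
    url: 'https://api......', // same (truncated) endpoint as above
    method: 'put',
    withCredentials: true,
    data: ifd,
    onUploadProgress: (progressEvent) => {
      const percent = Math.round((progressEvent.loaded * 100) / progressEvent.total);
      // Update only the slot belonging to this file; each loader then reads uploads[i].
      self.setState((prev) => {
        const uploads = prev.uploads.slice();
        uploads[index] = percent;
        return { uploads };
      });
    }
  });
}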
Related
I am using the following code in Vue. Here I upload multiple files at the same time with multiple axios requests, and everything works fine.
But I want a way to see what percentage of each file has been uploaded. There is an array this.upload of file objects, each with a progress property. I want to update that property on the specific object so I can show the user each file's upload progress.
for(var file of event.target.files){
if(!this.isValidFormat(file)) continue;
var thumb = await this.getThumb(file);
this.upload.push({
name: file.name,
index: parseInt(Math.round(file.size / (1024))),
size: (file.size / (1024 * 1024)).toFixed(2) + ' MB',
progress: 0,
thumbnail: thumb,
path: null
});
const formData = new FormData();
formData.append('image', file, file.name);
formData.append('path', 'tmp');
formData.append('index', increment);
axios.post('web/upload', formData, {
onUploadProgress: progressEvent => {
//console.log(this.config);
const index = parseInt(Math.round(progressEvent.total / 1024));
//console.log(index)
var u = this.upload.find(u => u.index == index);
if(u !== undefined){
u.progress = Math.floor((progressEvent.loaded * 100) / progressEvent.total);
}
}
}).then(res => {
this.upload[res.index].path = res.path;
console.log(this.upload)
});
increment++;
}
Instead of using an arrow function for onUploadProgress, you can use bind() to add an additional first argument to your handler, which will be a reference to the corresponding file object:
for(var file of event.target.files){
if(!this.isValidFormat(file)) continue;
var thumb = await this.getThumb(file);
const fileObject = {
name: file.name,
size: (file.size / (1024 * 1024)).toFixed(2) + ' MB',
progress: 0,
thumbnail: thumb,
path: null
};
this.upload.push(fileObject);
const formData = new FormData();
formData.append('image', file, file.name);
formData.append('path', 'tmp');
formData.append('index', increment);
axios.post('web/upload', formData, {
onUploadProgress: this.calcProgress.bind(this, fileObject)
}).then(res => {
  // the server response body is in res.data; update the file object bound above
  // (assuming the backend returns the stored path)
  fileObject.path = res.data.path;
  console.log(this.upload);
});
increment++;
}
function calcProgress(fileObj, progressEvent)
{
fileObj.progress = Math.floor((progressEvent.loaded * 100) / progressEvent.total);
}
You can commit the value to a Vuex store with a mutation and read it wherever you need it with a getter.
https://vuex.vuejs.org/guide/mutations.html
https://vuex.vuejs.org/guide/getters.html
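A minimal sketch of that idea, assuming a store that keeps an uploads array indexed per file (the store shape, mutation names, and getter name are all assumptions, not part of the original answer):

// store.js (Vue 2 / Vuex 3 style, matching the question's code)
import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

export default new Vuex.Store({
  state: {
    uploads: [] // [{ name, progress }]
  },
  mutations: {
    ADD_UPLOAD(state, file) {
      state.uploads.push({ name: file.name, progress: 0 });
    },
    SET_PROGRESS(state, { index, progress }) {
      // Vue.set keeps the array element change reactive
      Vue.set(state.uploads, index, { ...state.uploads[index], progress });
    }
  },
  getters: {
    uploads: state => state.uploads
  }
});

Inside onUploadProgress you would then commit SET_PROGRESS with the file's index (this.$store.commit('SET_PROGRESS', { index, progress })), and any component can display the list through the uploads getter.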
I want to retrieve a list of images from Amazon S3 in one go, based on their image URLs.
Currently I am able to fetch a single image using the following code:
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
AWS.config.region = region;
var bucketInstance = new AWS.S3();
var params = {
Bucket: bucketName,
Key: awsImgUrl
}
bucketInstance.getObject(params, function (err, file) {
if (file) {
var dataSrc = "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
callbackSuccess(dataSrc);
} else {
callbackSuccess("Error");
}
});
var EncodeData = function (data) {
  // convert the byte array (file.Body) to a base64 string, wrapped every 76 characters
  var str = data.reduce(function (a, b) { return a + String.fromCharCode(b) }, '');
  return btoa(str).replace(/.{76}(?=.)/g, '$&\n');
}
In my scenario I have multiple S3 image URLs, like awsImgUrl1, awsImgUrl2, ..., awsImgUrln.
How can I fetch them in one go instead of one by one?
You cannot get more than one image per API call with S3. You can, however, make multiple calls in parallel.
Using promises this is straightforward:
var bucketInstance = new AWS.S3();
var imageKeys = [ awsImgUrl1, awsImgUrl2, awsImgUrl3];
var promisesOfS3Objects = imageKeys.map(function(key) {
return bucketInstance.getObject({
Bucket: bucketName,
Key: key
}).promise()
.then(function (file) {
return "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
})
})
Promise.all(promisesOfS3Objects)
.then(callbackSuccess) // callbackSuccess is called with an array of data-URI strings
.catch(function() { callbackSuccess("Error") })
You can change the way you upload the image data: instead of uploading a single image, upload one document containing the data for multiple images.
const addImageBlock = () => {
var photoBlock = [
{
imageId: 'id',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
},
{
imageId: 'id2',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
},
{
imageId: 'id3',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
},
{
imageId: 'id4',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
}
//...ect
];
s3.upload({
  Key: photoBlockId + '.json',
  Body: JSON.stringify(photoBlock), // S3 expects a string/Buffer body; it is parsed back as JSON on download
  ACL: 'public-read'
}, function(err, data) {
  if (err) {
    return alert('There was an error: ' + err.message);
  }
});
}
Then, when you receive this data with one S3 call, you can loop through it and render the images on the frontend:
getObject(params, function (err, file) {
  var imageArr = [];
  if (file) {
    // file.Body is the JSON document uploaded above
    JSON.parse(file.Body.toString()).map((item) => {
      var image = new Image();
      image.src = item.body;
      imageArr.push(image);
    });
    callbackSuccess(imageArr);
  } else {
    callbackSuccess("Error");
  }
});
The AWS SDK does not have a method to read multiple objects at once, and the same goes for the console: you cannot download multiple files in a single call.
It only provides GetObject, which reads one object from the bucket by key.
So in your case you have to read them one by one by key name, assuming you already have the keys as a list.
If you first need the list of objects, you can get a summary of the objects in the bucket and then loop over it to download all the files.
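A rough sketch of that approach with the JavaScript SDK (the prefix is a placeholder, and bucketName is reused from the question's snippet):

var s3 = new AWS.S3();

// List the object keys first (up to 1000 per call), then download each one in parallel.
s3.listObjectsV2({ Bucket: bucketName, Prefix: 'images/' }).promise()
  .then(function (listing) {
    return Promise.all(listing.Contents.map(function (obj) {
      return s3.getObject({ Bucket: bucketName, Key: obj.Key }).promise();
    }));
  })
  .then(function (files) {
    // files is an array of getObject results (Body, ContentType, ...)
    console.log('Downloaded ' + files.length + ' objects');
  })
  .catch(function (err) {
    console.error(err);
  });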
I am new to Firebase and AngularJS, and I am having difficulty getting the download URLs from Firebase Storage and storing them in the Firebase Realtime Database.
I was able to upload multiple files to Firebase Storage. The problem is that when I store the download URLs in the Realtime Database, all of the URL values are the same. They should differ, based on each file's downloadURL.
Here is my script:
$scope.submitPhotos = function(file){
console.log(file);
var updateAlbum = [];
for (var i = 0; i < file.length; i++) {
var storageRef=firebase.storage().ref(albumtitle).child(file[i].name);
var task=storageRef.put(file[i]);
task.on('state_changed', function progress(snapshot){
var percentage=( snapshot.bytesTransferred / snapshot.totalBytes )*100;
if (percentage==100){
storageRef.getDownloadURL().then(function(url) {
var galleryRef = firebase.database().ref('gallery/'+albumkey);
var postkey = firebase.database().ref('gallery/'+albumkey).push().key;
updateAlbum={img:url};
firebase.database().ref('gallery/'+ albumkey+'/'+postkey).update(updateAlbum);
});
};
})
};
};
As you can see, I was able to store the URLs in the database, but all of them are the same. What I need is for every key to store the link for its own file from Storage.
Any help is appreciated. Thanks
function uploadImg(file, i) {
  return new Promise((resolve, reject) => {
    var storageRef = firebase.storage().ref("store-images/" + file[i].file.name);
    var task = storageRef.put(file[i].file); // keep the task local so each upload has its own reference
    task.on('state_changed', function progress(snapshot) {
      var percentage = (snapshot.bytesTransferred / snapshot.totalBytes) * 100;
      console.log(percentage);
      // use the percentage as you wish, to show the progress of an upload for example
    },
    function (error) { // error handling
      console.log(error);
      reject(error);
    },
    function complete() { // this function executes after a successful upload
      task.snapshot.ref.getDownloadURL().then(function (downloadURL) {
        resolve(downloadURL);
      });
    });
  });
}
async function putImage(file) {
for (var i = 0; i < file.length; i++) {
var dd = await uploadImg(file,i);
firebase.database().ref().child('gallery').push(dd);
}
}
Try using the code below:
$scope.submitPhotos = function(file){
console.log(file);
var updateAlbum = [];
for (var i = 0; i < file.length; i++) {
var storageRef=firebase.storage().ref(albumtitle).child(file[i].name);
var task=storageRef.put(file[i]);
task.on('state_changed', function progress(snapshot)
{
var percentage=( snapshot.bytesTransferred / snapshot.totalBytes )*100;
// use the percentage as you wish, to show progress of an upload for example
}, // use the function below for error handling
function (error) {
switch (error.code) {
case 'storage/unauthorized':
// User doesn't have permission to access the object
break;
case 'storage/canceled':
// User canceled the upload
break;
case 'storage/unknown':
// Unknown error occurred, inspect error.serverResponse
break;
}
}, function complete () //This function executes after a successful upload
{
let dwnURL = task.snapshot.downloadURL;
let galleryRef = firebase.database().ref('gallery/'+albumkey);
let postkey = firebase.database().ref('gallery/'+albumkey).push().key;
updateAlbum={img:dwnURL};
firebase.database().ref('gallery/'+ albumkey+'/'+postkey).update(updateAlbum);
});
};
};
All the best!
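One caution about the snippet above: in Firebase JS SDK 5.x and later, task.snapshot.downloadURL was removed, so the URL has to be fetched asynchronously from the storage ref instead, along these lines:

// Inside the `complete` callback: downloadURL is no longer a snapshot property
// in newer SDKs; ask the storage ref for it instead.
task.snapshot.ref.getDownloadURL().then(function (url) {
  firebase.database().ref('gallery/' + albumkey).push({ img: url });
});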
I have a complex object parameter that I need to send via POST, since it could be too long for a query string. The POST call asks the server to dynamically generate an Excel file, which should then be downloaded asynchronously. All of this happens inside a React application. How does one do this using axios.post, React, and Web API? I have confirmed that the file is generated and that the response comes back, but I'm not sure how to actually open the file. I have a hidden iframe whose src I'm trying to point at the file, but I don't know which response property to use.
// webapi
[HttpPost]
public HttpResponseMessage Post([FromBody]ExcelExportModel pModel)
{
var lFile = ProductDataModel.GetHoldingsExport(pModel);
var lResult = new HttpResponseMessage(HttpStatusCode.OK);
lResult.Content = new ByteArrayContent(lFile);
lResult.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
{
FileName = "HoldingsGridExport.xls"
};
lResult.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
return lResult;
}
// client side api
static getHoldingsExport({ UserConfigurationID, UserID, Configurations, ViewName, SortModel, FilterModel, UserConfigType, IsDefault, LastPortfolioSearchID = null, ProductId }) {
const filterModel = JSON.stringify(FilterModel); // saving as string as this model is dynamically generated by grid out of my control
const sortModel = JSON.stringify(SortModel);
let params = JSON.stringify({
UserConfigurationID,
UserID,
Configurations,
ViewName,
filterModel,
sortModel,
UserConfigType,
IsDefault,
LastPortfolioSearchID,
ProductId
});
return axiosInstance.post("/api/HoldingsExport", params);
}
// client side app call to get file
HoldingsApi.getHoldingsExport(config)
.then(function(response) {
debugger;
let test = response;
})
.catch(error => {
toastr.error('Failed to get export.');
});
This is how I've achieved file downloads by POSTing via Axios:
Axios.post("YOUR API URI", {
// include your additional POSTed data here
responseType: "blob"
}).then((response) => {
let blob = new Blob([response.data], { type: extractContentType(response) }),
downloadUrl = window.URL.createObjectURL(blob),
filename = "",
disposition = response.headers["content-disposition"];
if (disposition && disposition.indexOf("attachment") !== -1) {
let filenameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/,
matches = filenameRegex.exec(disposition);
if (matches != null && matches[1]) {
filename = matches[1].replace(/['"]/g, "");
}
}
let a = document.createElement("a");
if (typeof a.download === "undefined") {
window.location.href = downloadUrl;
} else {
a.href = downloadUrl;
a.download = filename;
document.body.appendChild(a);
a.click();
}
}).catch((error) => {
// ...
});
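A small variation on the same idea (a sketch, not part of the original answer): wrap the blob-to-download step in a helper that also releases the temporary object URL afterwards.

// Trigger a browser download for a Blob, then clean up the temporary URL and anchor.
function saveBlob(blob, filename) {
  const downloadUrl = window.URL.createObjectURL(blob);
  const a = document.createElement("a");
  if (typeof a.download === "undefined") {
    // Older browsers without the download attribute: navigate to the blob URL.
    window.location.href = downloadUrl;
    return;
  }
  a.href = downloadUrl;
  a.download = filename;
  document.body.appendChild(a);
  a.click();
  document.body.removeChild(a);
  window.URL.revokeObjectURL(downloadUrl);
}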
In case the above solution does not serve you well, here is how I was able to download videos hosted in AWS S3 buckets:
const handleDownload = () => {
const link = document.createElement("a");
link.target = "_blank";
link.download = "YOUR_FILE_NAME"
axios
.get(url, {
responseType: "blob",
})
.then((res) => {
link.href = URL.createObjectURL(
new Blob([res.data], { type: "video/mp4" })
);
link.click();
});
};
I trigger the handleDownload function from a button's onClick handler.
The url in the function is the video's URL in the S3 bucket.
I am using ngCropImage to crop an image and want to upload it, following this link:
The ngCropImage directive returns the dataURI of the image, and I am converting it to a blob (after converting it I get a Blob object, which has a size and type). I converted the data URI to a blob using the following code:
/*html*/
<img-crop image="myImage" result-image="myCroppedImage" result-image-size="250"></img-crop>
$scope.myImage='';
$scope.myCroppedImage = {image: ''}
var blob;
//called when user crops
var handleFileSelect=function(evt) {
var file=evt.currentTarget.files[0];
var reader = new FileReader();
reader.onload = function (evt) {
$scope.$apply(function($scope){
$scope.myImage=evt.target.result;
});
};
console.log($scope.myCroppedImage)
reader.readAsDataURL(file);
var link = document.createElement('link');
blob = dataURItoBlob($scope.myCroppedImage)
console.log(blob)
};
angular.element(document.querySelector('#fileInput')).on('change',handleFileSelect);
function dataURItoBlob(dataURI) {
// convert base64/URLEncoded data component to raw binary data held in a string
var binary = atob(dataURI.split(',')[1]);
var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
var array = [];
for(var i = 0; i < binary.length; i++) {
array.push(binary.charCodeAt(i));
}
return new Blob([new Uint8Array(array)], {type: mimeString});
}
$scope.upload = function(file) {
//var file = new File(file, "filename");
// Configure The S3 Object
console.log($scope.creds)
AWS.config.update({ accessKeyId: $.trim($scope.creds.access_key), secretAccessKey: $.trim($scope.creds.secret_key) });
AWS.config.region = 'us-east-1';
var bucket = new AWS.S3({ params: { Bucket: $.trim($scope.creds.bucket) } });
if(file) {
//file.name = 'abc';
var uniqueFileName = $scope.uniqueString() + '-' + file.name;
var params = { Key: file.name , ContentType: file.type, Body: file, ServerSideEncryption: 'AES256' };
bucket.putObject(params, function(err, data) {
if(err) {
// There Was An Error With Your S3 Config
alert(err.message);
return false;
}
else {
// Success!
alert('Upload Done');
}
})
.on('httpUploadProgress',function(progress) {
// Log Progress Information
console.log(Math.round(progress.loaded / progress.total * 100) + '% done');
});
}
else {
// No File Selected
alert('No File Selected');
}
}
$scope.uniqueString = function() {
var text = "";
var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
for( var i=0; i < 8; i++ ) {
text += possible.charAt(Math.floor(Math.random() * possible.length));
}
return text;
}
//for uploading
$scope.handleSave = function(){
$scope.upload(blob);
}
Now I want to upload this blob to S3 using the code above, but I am not able to figure out how to do it (since the blob does not have a 'name').
Any help would be really appreciated. Thanks
You can always create a File from a Blob, and you can pass a file name as well:
var file = new File([blob], "filename");
You can use this same File object to upload to S3.
Change your handleSave method to the following. The file name will be abc.png for now:
//for uploading
$scope.handleSave = function(){
blob = dataURItoBlob($scope.myCroppedImage)
$scope.upload(new File([blob], "abc.png"));
}
It is not advisable to put the secret key
secretAccessKey: $.trim($scope.creds.secret_key)
on the client side. That is simply not done; anyone could manipulate your bucket at will. A safer pattern is sketched below.
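A common alternative (a sketch, not from the original answer): keep the credentials on a server, hand the browser a pre-signed PUT URL, and upload directly to S3 with it. The Express route, the /sign-upload path, and the bucket name are assumptions.

// Server side (Node, AWS SDK v2): credentials stay here, never in the browser.
const express = require('express');
const AWS = require('aws-sdk');

const app = express();
const s3 = new AWS.S3({ region: 'us-east-1' });

app.get('/sign-upload', (req, res) => {
  const url = s3.getSignedUrl('putObject', {
    Bucket: 'my-bucket',          // placeholder bucket name
    Key: req.query.name,          // file name chosen by the client
    ContentType: req.query.type,  // must match the upload's Content-Type
    Expires: 60                   // URL valid for 60 seconds
  });
  res.json({ url });
});

app.listen(3000);

// Client side: fetch the signed URL, then PUT the blob/file straight to S3.
// fetch('/sign-upload?name=abc.png&type=image/png')
//   .then(r => r.json())
//   .then(({ url }) => fetch(url, { method: 'PUT', body: file, headers: { 'Content-Type': file.type } }));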