Uploading blob file to Amazon S3 - AngularJS

I am using ngImgCrop to crop an image and want to upload it following this link:
The ngImgCrop directive returns a data URI of the image and I am converting it to a blob (after converting I get a Blob object, which has size and type). I converted the data URI to a blob using the following code:
/*html*/
<img-crop image="myImage" result-image="myCroppedImage" result-image-size="250"></img-crop>

$scope.myImage = '';
$scope.myCroppedImage = {image: ''};
var blob;

// called when the user selects a file to crop
var handleFileSelect = function(evt) {
    var file = evt.currentTarget.files[0];
    var reader = new FileReader();
    reader.onload = function(evt) {
        $scope.$apply(function($scope) {
            $scope.myImage = evt.target.result;
        });
    };
    console.log($scope.myCroppedImage);
    reader.readAsDataURL(file);
    blob = dataURItoBlob($scope.myCroppedImage);
    console.log(blob);
};
angular.element(document.querySelector('#fileInput')).on('change', handleFileSelect);

function dataURItoBlob(dataURI) {
    // convert base64/URLEncoded data component to raw binary data held in a string
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {type: mimeString});
}
$scope.upload = function(file) {
    //var file = new File(file, "filename");
    // Configure the S3 object
    console.log($scope.creds);
    AWS.config.update({ accessKeyId: $.trim($scope.creds.access_key), secretAccessKey: $.trim($scope.creds.secret_key) });
    AWS.config.region = 'us-east-1';
    var bucket = new AWS.S3({ params: { Bucket: $.trim($scope.creds.bucket) } });
    if (file) {
        //file.name = 'abc';
        var uniqueFileName = $scope.uniqueString() + '-' + file.name;
        var params = { Key: uniqueFileName, ContentType: file.type, Body: file, ServerSideEncryption: 'AES256' };
        bucket.putObject(params, function(err, data) {
            if (err) {
                // There was an error with your S3 config
                alert(err.message);
                return false;
            }
            else {
                // Success!
                alert('Upload Done');
            }
        })
        .on('httpUploadProgress', function(progress) {
            // Log progress information
            console.log(Math.round(progress.loaded / progress.total * 100) + '% done');
        });
    }
    else {
        // No file selected
        alert('No File Selected');
    }
};
$scope.uniqueString = function() {
    var text = "";
    var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    for (var i = 0; i < 8; i++) {
        text += possible.charAt(Math.floor(Math.random() * possible.length));
    }
    return text;
};

// for uploading
$scope.handleSave = function() {
    $scope.upload(blob);
};
Now I want to upload this blob to S3 using the approach above, but I am not able to figure out how to upload the blob to S3 (as I am not getting a 'name' on the blob).
Any help would be really appreciated. Thanks

You can always create a File from a Blob, and you can pass a file name as well.
var file = new File([blob], "filename");
You can use this same File object to upload to S3.
Change your handleSave method to the following. The file name will be abc.png for now:
// for uploading
$scope.handleSave = function() {
    blob = dataURItoBlob($scope.myCroppedImage);
    $scope.upload(new File([blob], "abc.png"));
};

It is not advisable to put the key
secretAccessKey: $.trim($scope.creds.secret_key)
on the client side. That is just not done; anyone can manipulate your bucket at will.
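If you want to keep the secret key off the client entirely, a common alternative is to let your server hand out pre-signed URLs and have the browser PUT the blob straight to S3. A minimal sketch of that idea, assuming an Express backend with the Node AWS SDK (the route name, bucket name and query parameters are placeholders, not part of the original code):
// server-side: the secret key never leaves the server
var express = require('express');
var AWS = require('aws-sdk');
var app = express();
var s3 = new AWS.S3({ region: 'us-east-1' }); // credentials picked up from the server environment
app.get('/sign-upload', function(req, res) {
    var params = {
        Bucket: 'my-bucket',                // placeholder bucket name
        Key: req.query.fileName,            // e.g. "a1b2c3d4-abc.png"
        ContentType: req.query.fileType,    // e.g. "image/png"
        Expires: 60                         // signed URL valid for 60 seconds
    };
    s3.getSignedUrl('putObject', params, function(err, url) {
        if (err) return res.status(500).send(err.message);
        res.json({ signedUrl: url });
    });
});
On the Angular side you would then simply $http.put(signedUrl, blob, { headers: { 'Content-Type': blob.type } }) with no AWS credentials in the page at all.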

Related

How to export data into CSV and PDF files using AngularJS

When I click a button (a separate one each for CSV and PDF), I want the data to download automatically as a correctly formatted CSV or PDF file.
This is my CSV code; I want to add PDF export alongside it:
$scope.downloadData = function() {
    var datasets = $scope.datasets.reverse();
    var file_name = $scope.m_id + '.csv';
    var dataUrl = 'data:text/csv;charset=utf-8,';
    var json = [];
    if (datasets !== null) {
        for (var idx = 0; idx < datasets.length; idx++) {
            var dataset = datasets[idx].data;
            var time = datasets[idx].timestamp;
            time = $filter('date')(time, "dd/MMMM/yyyy-hh:mm a");
            dataset.time = time;
            json.push(dataset);
        }
        var fields = Object.keys(json[0]);
        var csv = json.map(function(row) {
            return fields.map(function(fieldName) {
                return '"' + (row[fieldName] || '') + '"';
            });
        });
        csv.unshift(fields);
        var csv_str = csv.join('%0A');
        var downloadURL = dataUrl + csv_str;
        var saveAs = function(uri, filename) {
            var link = document.createElement('a');
            if (typeof link.download === 'string') {
                document.body.appendChild(link); // Firefox requires the link to be in the body
                link.download = filename;
                link.href = uri;
                link.target = "_blank";
                link.click();
                document.body.removeChild(link); // remove the link when done
            } else {
                location.replace(uri);
            }
        };
        saveAs(downloadURL, file_name);
    } else {
        $scope.err_msg = 'Failed to get data. Try reloading the page.';
    }
};
I tried some scripts I found on the internet, but they are not working; some have formatting issues and some fail to download.
Thanks in advance.
You should use this awesome library for PDF/CSV or whatever other formats: FileSaver.
Here is a code example, a service created using FileSaver:
function download(api, file, contentType) {
    var d = $q.defer();
    $http({
        method: 'GET',
        url: api,
        responseType: 'arraybuffer',
        headers: {
            'Content-type': contentType
        }
    }).success(function(response) {
        var data = new Blob([response], {
            type: contentType + ';charset=utf-8'
        });
        FileSaver.saveAs(data, file);
        d.resolve(response);
    }).error(function(response) {
        d.reject(response);
    });
    return d.promise;
}
The file parameter is the name of the file; you can use the same service and pass the content types and file names directly from the controller.
Let's say your service name is homeService.
For PDF, call:
homeService.download('/api/download/whatever', 'export.pdf', 'application/pdf')
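If you prefer to keep the CSV generation fully client-side (as in your original downloadData function), FileSaver also works with a Blob you build yourself, which avoids the formatting problems of the data-URL approach. A rough sketch, reusing the csv array and file_name from your code and assuming the same FileSaver service is available:
// Build the CSV text with real newlines instead of '%0A'
var csvText = csv.map(function(row) {
    return row.join(',');
}).join('\n');
// Hand FileSaver a Blob instead of a data URL
var csvBlob = new Blob([csvText], { type: 'text/csv;charset=utf-8' });
FileSaver.saveAs(csvBlob, file_name);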

Zipping multiple files in Node.js (~300 kB each) and streaming to the client

My code works fine when I zip 3 files of around 300 kB each and send them to the client. I used the following links for help:
Dynamically create and stream zip to client
how to convert multiple files to compressed zip file using node js
But as soon as I try to zip a 4th file I get "download - Failed Network error" in Chrome.
Following is my code:
var express = require('express');
var app = express();
var fileSystem = require('fs');
var Archiver = require('archiver');
var util = require('util');
var AdmZip = require('adm-zip');
var config = require('./config');

var log_file = fileSystem.createWriteStream(__dirname + '/debug.log', {flags: 'a'});
var logError = function(d) {
    log_file.write('[' + new Date().toUTCString() + '] ' + util.format(d) + '\n');
};

app.get('/zip', function(req, res, next) {
    try {
        res = setHeaderOfRes(res);
        sendZip(req, res);
    } catch (err) {
        logError(err.message);
        next(err); // This will call the error middleware for 500 error
    }
});

var setHeaderOfRes = function(res) {
    res.setHeader("Access-Control-Allow-Origin", "*"); // Remove this when this is on production
    res.setHeader("Content-Type", "application/zip");
    res.setHeader("Content-disposition", "attachment;");
    return res;
};

var sendZip = function(req, res) {
    var filesNotFound = [];
    var zip = Archiver('zip'); // create the archive for this request
    zip.pipe(res);
    if (req.query.leapIds) {
        var leapIdsArray = req.query.leapIds.split(',');
        var i, lengthi;
        for (i = 0, lengthi = leapIdsArray.length; i < lengthi; i++) {
            try {
                var t = config.web.sharedFilePath + leapIdsArray[i] + '.vsdx';
                if (fileSystem.statSync(t).isFile()) {
                    zip.append(fileSystem.createReadStream(t), {
                        name: leapIdsArray[i] + '.vsdx'
                    });
                }
            } catch (err) {
                filesNotFound.push(leapIdsArray[i] + '.vsdx');
            }
        }
        var k, lengthk;
        var str = '';
        for (k = 0, lengthk = filesNotFound.length; k < lengthk; k++) {
            str += filesNotFound[k] + ',';
        }
        if (filesNotFound.length > 0) {
            zip.append('These files do not exist on the server - ' + str, { name: 'logFile.log' });
        }
        zip.finalize();
    }
};
I tried zip.file instead of zip.append; that didn't work.
I want to zip a minimum of 10 files of 300 kB each and send them to the client. Can anyone please let me know the approach?
Thanks
/********************* Update *********************/
I was only looking at the server.js created in Node. The data is actually sent correctly to the client; the AngularJS client code seems to fail for large files.
$http.get(env.nodeJsServerUrl + "zip?leapIds=" + nodeDetails, { responseType: "arraybuffer" }
).then(function (response) {
    nodesDetails = response.data;
    var base64String = _arrayBufferToBase64(nodesDetails);
    function _arrayBufferToBase64(buffer) {
        var binary = '';
        var bytes = new Uint8Array(buffer);
        var len = bytes.byteLength;
        for (var i = 0; i < len; i++) {
            binary += String.fromCharCode(bytes[i]);
        }
        return window.btoa(binary);
    }
    var anchor = angular.element('<a/>');
    anchor.attr({
        href: 'data:application/zip;base64,' + base64String,
        target: '_blank',
        download: $scope.main.routeParams.sectorId + "-ProcessFiles.zip"
    })[0].click();
});
The part href: 'data:application/zip;base64,' + base64String seems to fail for large data received from the server. It works for small files but fails for large ones.
Found it.
The problem was not in the Node.js zipping logic; that worked perfectly.
The issue was in the way I was handling the received response data.
If the received data is too large, the following code fails:
anchor.attr({
    href: 'data:application/zip;base64,' + base64String,
    target: '_blank',
    download: $scope.main.routeParams.sectorId + "-ProcessFiles.zip"
})[0].click();
So the workaround is to use a Blob:
function b64toBlob(b64Data, contentType, sliceSize) {
    contentType = contentType || '';
    sliceSize = sliceSize || 512;
    var byteCharacters = atob(b64Data);
    var byteArrays = [];
    for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
        var slice = byteCharacters.slice(offset, offset + sliceSize);
        var byteNumbers = new Array(slice.length);
        for (var i = 0; i < slice.length; i++) {
            byteNumbers[i] = slice.charCodeAt(i);
        }
        var byteArray = new Uint8Array(byteNumbers);
        byteArrays.push(byteArray);
    }
    var blob = new Blob(byteArrays, { type: contentType });
    return blob;
}

var contentType = 'application/zip';
var blob = b64toBlob(base64String, contentType);
saveAs(blob, "hello world.zip");
This link helped me out: How to save binary data of zip file in Javascript?
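A simpler variant, if you can change the $http call, is to skip the base64 round trip entirely and request the response as a Blob. This is only a sketch of that idea, reusing the same endpoint and FileSaver's saveAs from above:
$http.get(env.nodeJsServerUrl + "zip?leapIds=" + nodeDetails, { responseType: "blob" })
    .then(function (response) {
        // response.data is already a Blob carrying the server's application/zip type
        saveAs(response.data, $scope.main.routeParams.sectorId + "-ProcessFiles.zip");
    });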
Already answered here: https://stackoverflow.com/a/62639710/8612027
Sending a zip file as binary data with expressjs and node-zip:
app.get("/multipleinzip", (req, res) => {
var zip = new require('node-zip')();
var csv1 = "a,b,c,d,e,f,g,h\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8";
zip.file('test1.file', csv1);
var csv2 = "z,w,x,d,e,f,g,h\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8";
zip.file('test2.file', csv2);
var csv3 = "q,w,e,d,e,f,g,h\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8";
zip.file('test3.file', csv3);
var csv4 = "t,y,u,d,e,f,g,h\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8\n1,2,3,4,5,6,7,8";
zip.file('test4.file', csv4);
var data = zip.generate({base64:false,compression:'DEFLATE'});
console.log(data); // ugly data
res.type("zip")
res.send(new Buffer(data, 'binary'));
})
Creating a download link for the zip file: fetch the data and convert the response to an ArrayBuffer ->
// get the response from fetch as an ArrayBuffer (note: arrayBuffer() returns a promise)
response.arrayBuffer().then(function (data) {
    const blob = new Blob([data]);
    const fileName = `${filename}.${extension}`;
    if (navigator.msSaveBlob) {
        // IE 10+
        navigator.msSaveBlob(blob, fileName);
    } else {
        const link = document.createElement('a');
        // Browsers that support HTML5 download attribute
        if (link.download !== undefined) {
            const url = URL.createObjectURL(blob);
            link.setAttribute('href', url);
            link.setAttribute('download', fileName);
            link.style.visibility = 'hidden';
            document.body.appendChild(link);
            link.click();
            document.body.removeChild(link);
        }
    }
});
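For completeness, here is a small sketch of how the snippet above could be wired to an actual fetch call; using response.blob() lets the browser build the Blob for you (the URL and file name are placeholders):
fetch('/multipleinzip')
    .then(function (response) {
        return response.blob(); // browser constructs the Blob from the zip bytes
    })
    .then(function (blob) {
        const url = URL.createObjectURL(blob);
        const link = document.createElement('a');
        link.href = url;
        link.download = 'files.zip'; // placeholder file name
        document.body.appendChild(link);
        link.click();
        document.body.removeChild(link);
        URL.revokeObjectURL(url);
    });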

Saving Location of Scraped Image to DB - Node/MEAN

After scraping an image, I'm able to download it to a folder using request. I would like to pass the location of this image along to my Mongoose collection.
In the callback, I think there should be a way to save the location so I can pass it along when saving my model object.
exports.createLook = function(req, res) {
    var url = req.body.image;
    var randomizer = '123456';
    var download = function(url, filename, callback) {
        request(url)
            .pipe(fs.createWriteStream(filename))
            .on('close', callback);
    };
    download(url, '../client/assets/images/' + randomizer + '.jpg', function() {
        console.log('done');
        // do something?
    });
    // now get model details to save
    var newLook = new Look();
    newLook.title = req.body.title;
    newLook.image = // image location
    newLook.save(function(err, look) {
        if (err) {
            return res.send(500);
        } else {
            res.send(look);
        }
    });
};
Assuming that 'randomizer' will be generated, I would do:
exports.createLook = function(req, res) {
    var url = req.body.image;
    var randomizer = getSomethingRandom();
    var download = function(url, filename, callback) {
        request(url)
            .pipe(fs.createWriteStream(filename))
            .on('close', function() {
                callback(filename);
            });
    };
    download(url, '../client/assets/images/' + randomizer + '.jpg', function(filename) {
        console.log('done');
        // now get model details to save
        var newLook = new Look();
        newLook.title = req.body.title;
        newLook.image = filename;
        ....
    });
};
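If the browser will load the image over HTTP rather than from the server's file system, it may be better to store a web-facing path instead of the raw download path. A small sketch of that variation (the '/assets/images/' URL prefix is an assumption based on the download folder above):
download(url, '../client/assets/images/' + randomizer + '.jpg', function() {
    var newLook = new Look();
    newLook.title = req.body.title;
    // store the path the browser will actually request, not the server-side relative path
    newLook.image = '/assets/images/' + randomizer + '.jpg';
    newLook.save(function(err, look) {
        if (err) return res.send(500);
        res.send(look);
    });
});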

angular-file-upload with ngImgCrop

I'm using ngImgCrop to crop an image and then upload the cropped image to the server using angular-file-upload.
I can get the $dataURI from the "on-change" option in ngImgCrop, but I need a File instance to call $upload.
How can I get a File instance of the cropped image in order to upload it?
$scope.upload = $upload.upload({
    url: '/api/fileupload',
    file: [**file cropped here**]
}).progress(function (evt) {
    //
}).success(function (data, status, headers, config) {
    //
});
I guess you'll find a proper answer in this method. I found it on GitHub, on the angular-file-upload issues page (https://github.com/nervgh/angular-file-upload/issues/208):
/**
 * Converts data uri to Blob. Necessary for uploading.
 * @see
 *   http://stackoverflow.com/questions/4998908/convert-data-uri-to-file-then-append-to-formdata
 * @param {String} dataURI
 * @return {Blob}
 */
var dataURItoBlob = function(dataURI) {
    var binary = atob(dataURI.split(',')[1]);
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    var array = [];
    for (var i = 0; i < binary.length; i++) {
        array.push(binary.charCodeAt(i));
    }
    return new Blob([new Uint8Array(array)], {type: mimeString});
};
You should be able to get a file instance by doing something like this:
var blob = dataURItoBlob($scope.croppedImage);
I don't know if it works exactly the right way, but it seems to.
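To actually hand that result to $upload as in the question, one option is to wrap the blob in a File (browsers without the File constructor can fall back to sending the blob with a file name). A minimal sketch, assuming the dataURItoBlob helper above and the /api/fileupload URL from the question:
var blob = dataURItoBlob($scope.croppedImage);
var croppedFile = new File([blob], 'cropped.png', { type: blob.type }); // File constructor may be missing in older browsers
$scope.upload = $upload.upload({
    url: '/api/fileupload',
    file: croppedFile
}).success(function (data, status, headers, config) {
    console.log('cropped image uploaded');
});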
Try something like:
var uploader = $scope.uploader = new FileUploader({
    url: '/saveImagePath',
    autoUpload: false
});

var handleFileSelect = function(evt) {
    var file = evt.currentTarget.files[0];
    var reader = new FileReader();
    reader.onload = function (evt) {
        $scope.$apply(function($scope) {
            $scope.myImage = evt.target.result;
        });
    };
    reader.readAsDataURL(file);
};
angular.element(document.querySelector('#fileInput')).on('change', handleFileSelect);
The uploader doesn't support base64 images, so you'll need to convert the cropped image from base64 to a Blob:
function base64ToBlob(base64Data, contentType) {
    contentType = contentType || '';
    var sliceSize = 1024;
    var byteCharacters = atob(base64Data);
    var bytesLength = byteCharacters.length;
    var slicesCount = Math.ceil(bytesLength / sliceSize);
    var byteArrays = new Array(slicesCount);
    for (var sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
        var begin = sliceIndex * sliceSize;
        var end = Math.min(begin + sliceSize, bytesLength);
        var bytes = new Array(end - begin);
        for (var offset = begin, i = 0; offset < end; ++i, ++offset) {
            bytes[i] = byteCharacters[offset].charCodeAt(0);
        }
        byteArrays[sliceIndex] = new Uint8Array(bytes);
    }
    return new Blob(byteArrays, { type: contentType });
}
You have to manually attach the files to the queue like this:
$scope.submit = function () {
    var file = base64ToBlob($scope.currentPortfolio.croppedImage.replace('data:image/png;base64,', ''), 'image/jpeg');
    uploader.addToQueue(file);
    uploader.uploadAll();
};
On the server side you get two types of files: one posted as an HTML file input and another in base64, which is the cropped image.

How do you upload an image file to a Mongoose database using MEAN.js

I am new to the MEAN stack. I want to know how to upload an image file to the database (Mongoose) through AngularJS. If possible, please provide me with some code. I have searched the internet but I haven't found any suitable code.
There are plenty of ways and tools to achieve what you want. I'll put one of them here.
For this one I use angular-file-upload on the client side, so you need this in your controller:
$scope.onFileSelect = function(image) {
    if (angular.isArray(image)) {
        image = image[0];
    }
    // This is how I handle file types on the client side
    if (image.type !== 'image/png' && image.type !== 'image/jpeg') {
        alert('Only PNG and JPEG are accepted.');
        return;
    }
    $scope.uploadInProgress = true;
    $scope.uploadProgress = 0;
    $scope.upload = $upload.upload({
        url: '/upload/image',
        method: 'POST',
        file: image
    }).progress(function(event) {
        $scope.uploadProgress = Math.floor(event.loaded / event.total);
        $scope.$apply();
    }).success(function(data, status, headers, config) {
        $scope.uploadInProgress = false;
        // If you need the uploaded file immediately
        $scope.uploadedImage = JSON.parse(data);
    }).error(function(err) {
        $scope.uploadInProgress = false;
        console.log('Error uploading file: ' + err.message || err);
    });
};
And the following code in your view (I also added a file type filter for modern browsers):
Upload image <input type="file" data-ng-file-select="onFileSelect($files)" accept="image/png, image/jpeg">
<span data-ng-if="uploadInProgress">Upload progress: {{ uploadProgress }}</span>
<img data-ng-src="{{ uploadedImage }}" data-ng-if="uploadedImage">
For server side, I used node-multiparty.
And this is what you need in your server side route:
app.route('/upload/image')
.post(upload.postImage);
And in the server-side controller:
var uuid = require('node-uuid'),
    multiparty = require('multiparty'),
    fs = require('fs');

exports.postImage = function(req, res) {
    var form = new multiparty.Form();
    form.parse(req, function(err, fields, files) {
        var file = files.file[0];
        var contentType = file.headers['content-type'];
        var tmpPath = file.path;
        var extIndex = tmpPath.lastIndexOf('.');
        var extension = (extIndex < 0) ? '' : tmpPath.substr(extIndex);
        // uuid is for generating unique filenames.
        var fileName = uuid.v4() + extension;
        var destPath = 'path/to/where/you/want/to/store/your/files/' + fileName;
        // Server-side file type checker.
        if (contentType !== 'image/png' && contentType !== 'image/jpeg') {
            fs.unlinkSync(tmpPath);
            return res.status(400).send('Unsupported file type.');
        }
        fs.rename(tmpPath, destPath, function(err) {
            if (err) {
                return res.status(400).send('Image is not saved:');
            }
            return res.json(destPath);
        });
    });
};
As you can see, I store uploaded files in the file system, so I just used node-uuid to give them a unique name. If you want to store your files directly in the database, you don't need uuid; in that case, just use the Buffer data type.
Also, please take care of things like adding angularFileUpload to your Angular module dependencies.
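If you do want to store the image bytes directly in MongoDB as mentioned above, here is a minimal sketch of what the Mongoose side could look like (the schema and model names are just examples, not from the original answer):
var mongoose = require('mongoose');
// Example schema holding the raw image bytes plus their MIME type
var imageSchema = new mongoose.Schema({
    name: String,
    img: {
        data: Buffer,
        contentType: String
    }
});
var ImageModel = mongoose.model('Image', imageSchema);
// In the upload handler, instead of fs.rename you would read the temp file into a Buffer:
// fs.readFile(tmpPath, function(err, buffer) {
//     new ImageModel({ name: fileName, img: { data: buffer, contentType: contentType } })
//         .save(function(err) { /* respond to the client */ });
// });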
I got ENOENT and EXDEV errors. After solving these, the code below worked for me.
var uuid = require('node-uuid'),
    multiparty = require('multiparty'),
    fs = require('fs');

var form = new multiparty.Form();
form.parse(req, function(err, fields, files) {
    var file = files.file[0];
    var contentType = file.headers['content-type'];
    var tmpPath = file.path;
    var extIndex = tmpPath.lastIndexOf('.');
    var extension = (extIndex < 0) ? '' : tmpPath.substr(extIndex);
    // uuid is for generating unique filenames.
    var fileName = uuid.v4() + extension;
    var destPath = appRoot + '/../public/images/profile_images/' + fileName;
    // Server-side file type checker.
    if (contentType !== 'image/png' && contentType !== 'image/jpeg') {
        fs.unlinkSync(tmpPath);
        return res.status(400).send('Unsupported file type.');
    }
    var is = fs.createReadStream(tmpPath);
    var os = fs.createWriteStream(destPath);
    if (is.pipe(os)) {
        fs.unlink(tmpPath, function (err) { // unlink the file from the temp path after copy
            if (err) {
                console.log(err);
            }
        });
        return res.json(destPath);
    } else {
        return res.json('File not uploaded');
    }
});
For the variable 'appRoot', do the following in express.js:
var path = require('path');
global.appRoot = path.resolve(__dirname);
