I am trying to download a PDF without a prompt, using a PDF that is already present in my mocked test data. I am able to do this with the code below, but when I try to open the downloaded PDF I get 'Failed to load PDF document'. What am I missing here? Kindly help.
fs.readFile('./src/test-data/service/print/notes.pdf', function(error, content) {
    if (error) {
        res.writeHead(500);
        res.end();
    } else {
        res.writeHead(200, {
            'Content-Type': 'application/pdf',
            'Content-Disposition': 'attachment; filename=notes.pdf'
        });
        res.end(content, 'utf-8');
    }
});
You must use res.download:
function fileExist(fullpath) {
    try {
        return fs.statSync(fullpath).isFile();
    } catch (e) {
        return false;
    }
}

var name = 'notes.pdf';
var filePath = './src/test-data/service/print/' + name;

if (!fileExist(filePath)) {
    console.log("file doesn't exist");
} else {
    res.download(filePath, name, function(err) {
        fs.unlink(filePath);
    });
}
If you want to keep your file after the download, replace the line
res.download(filePath, name, function(err) {
    fs.unlink(filePath);
});
with
res.download(filePath, name);
In the Angular application, use $window.open(url, '_self');
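For example, a minimal sketch of the Angular side; the route '/api/print/notes.pdf' and the controller name are assumptions for illustration:

// Hypothetical controller; the download route is an assumption.
app.controller('DownloadCtrl', function($scope, $window) {
    $scope.downloadPdf = function() {
        // Navigating the current window to the route lets the server's
        // Content-Disposition: attachment header trigger the save dialog.
        $window.open('/api/print/notes.pdf', '_self');
    };
});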
This is working perfectly.
var file = fs.createReadStream('./src/test-data/service/print/notes.pdf');
var stat = fs.statSync('./src/test-data/service/print/notes.pdf');
res.setHeader('Content-Length', stat.size);
res.setHeader('Content-Type', 'application/pdf');
res.setHeader('Content-Disposition', 'attachment; filename=agreement.pdf');
file.pipe(res);
If the above solutions do not solve your problem, and you're using nginx + Express, the problem might be nginx buffering the response. You can disable that using the following:
proxy_buffering off;
Angular client code:
$http.post('/zip', {
    id: _id
})
.success(function (data, status, headers, config) {
    var blob = new Blob([data], {type: "application/zip"});
    var contentDisp = headers('content-disposition');
    if (contentDisp && /^attachment/i.test(contentDisp)) {
        var fileName = contentDisp.toLowerCase()
            .split('filename=')[1]
            .split(';')[0]
            .replace(/"/g, '');
        // The below command works but generates a corrupt zip file.
        FileSaver.saveAs(blob, fileName);
    }
})
.error(function () {
    console.log("Could not download");
});
NodeJS Server Code:
app.route('/zip/')
    .post(function(req, res) {
        var output = fs.createWriteStream(join(outdir, outzipfile));

        // Using s3zip to archive.
        s3Zip
            .archive({ s3: s3Client, bucket: bucket }, folder, s3_files)
            .pipe(output);

        output.on('close', function() {
            // This sends back a zip file.
            res.download(outPutDirectory + outputBcemFile);
        });

        output.on('error', function(err) {
            console.log(err);
            return res.status(500).json({error: "Internal Server Error"});
        });
    });
Although FileSaver.saveAs works and downloads the zip file, it seems to be corrupted. Is the type "application/zip" correct? I have also tried "octet/stream", and it downloads a corrupt zip file too. Any help would be greatly appreciated! Thanks.
This is a known FileSaver.js bug, tracked at the link below:
https://github.com/eligrey/FileSaver.js/issues/156
To solve it, add responseType: 'arraybuffer' to your $http request config and it will work.
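For example, a minimal sketch of the client call with that option set; the rest of the handler stays as in the question:

// responseType tells $http to hand the body back as an ArrayBuffer,
// so the binary zip data is not mangled by string conversion.
$http.post('/zip', { id: _id }, { responseType: 'arraybuffer' })
    .success(function (data, status, headers, config) {
        var blob = new Blob([data], { type: "application/zip" });
        // ...build fileName from the Content-Disposition header and call
        // FileSaver.saveAs(blob, fileName) as before.
    });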
Simple question: how do I save an image blob in Node.js from Angular?
Angular side:
$scope.upload = function (dataUrl, picFile) {
    Upload.upload({
        url: 'http://test.dev:3000/register/user/uploads',
        data: {
            file: Upload.dataUrltoBlob(dataUrl, picFile.name)
        },
    }).then(function (response) {
        $timeout(function () {
            $scope.result = response.data;
        });
    }, function (response) {
        if (response.status > 0) $scope.errorMsg = response.status
            + ': ' + response.data;
    }, function (evt) {
        $scope.progress = parseInt(100.0 * evt.loaded / evt.total);
    });
}
Node.js side (do I need middleware here? If so, which one should I use?):
router.post('/user/uploads', multipartMiddleware, function(req, res) {
    var newPath = "/Users/testUser/test_hold_files/" + req.files.file.originalFilename;
    fs.writeFile(newPath, req.files.file, function(err) {
        if (err) {
            console.log("Data Error ");
            return console.error(err);
        }
    });
    res.status(200).jsonp({status: "status: success "});
});
Right now this just writes out the file with the correct name, but it is empty.
You used to be able to access the uploaded file through req.files.imageName, then fs.readFile it from tmp and write it somewhere permanent; that is no longer the case in Express 4.0.
In Express 4, req.files is no longer available on the req object by default. To access uploaded files on the req.files object, use multipart-handling middleware like busboy, multer, formidable, multiparty, connect-multiparty, or pez.
Soooooooo, feel free to use whichever of the middlewares named above and then follow its API for dealing with uploaded files like images. Hope this helps, enjoy.
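For example, a minimal sketch using multer; the destination directory and the 'file' field name are assumptions taken from the Angular code above:

var express = require('express');
var multer = require('multer');
var router = express.Router();

// Store uploads on disk; multer populates req.file for a single-file field.
var upload = multer({ dest: '/Users/testUser/test_hold_files/' });

router.post('/user/uploads', upload.single('file'), function (req, res) {
    // req.file.path is where multer saved the file,
    // req.file.originalname is the name the client sent.
    console.log('saved', req.file.originalname, 'to', req.file.path);
    res.status(200).jsonp({ status: 'success' });
});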
OK, after a long time of messing with this stuff I found an answer. It does load the file into my folder.
I feel this is only a partial answer, since it does not resize the actual file to be smaller; it is whatever is selected with https://github.com/danialfarid/ng-file-upload. I used the following:
Upload.upload({
    url: 'http://test.dev:3000/register/user/uploads',
    data: {
        file: Upload.dataUrltoBlob(dataUrl, picFile.name)
    },
This did zoom into the selected image, but it did not make the actual file size smaller. I am still looking into this issue.
var formidable = require('formidable'),
    util = require('util'),
    fs_extra = require('fs-extra');
This is my POST route that accepts images:
router.post('/user/uploads', function (req, res) {
    var form = new formidable.IncomingForm();

    form.parse(req, function(err, fields, files) {
        res.writeHead(200, {'content-type': 'text/plain'});
        res.write('received upload:\n\n');
        res.end(util.inspect({fields: fields, files: files}));
    });

    form.on('end', function(fields, files) {
        /* Temporary location of our uploaded file */
        var temp_path = this.openedFiles[0].path;

        /* The file name of the uploaded file */
        var file_name = this.openedFiles[0].name;

        /* Location where we want to copy the uploaded file */
        var new_location = "/Users/testUser/test_hold_files/";

        fs_extra.copy(temp_path, new_location + file_name, function(err) {
            if (err) {
                console.error(err);
            } else {
                console.log("success!");
            }
        });
    });
});
I have also noticed that I can view the file in Chrome but not load it into GIMP; GIMP gives me a file error.
Small steps, I guess.
Maybe Datsik can give us some insight on what is going on here: https://stackoverflow.com/users/2128168/datsik
Phil
So I am building a Cordova/PhoneGap app in AngularJS 1, and I'm trying to save to and read from a file called calendar.txt in the app's private directory/sandbox, and can't.
My console logs while debugging show no errors: the file is created if it doesn't exist and is read correctly. However, that is not actually the case. When I build and run on my device, the data is not saved, and no file is created in the specified location.
I logged the path it is trying to use, and this is it:
file:///data/data/com.adobe.phonegap.app/files/calendar.txt
Here is the code I am using to open the file:
$rootScope.openFile = function() {
    var pathToFile = cordova.file.dataDirectory + "calendar.txt";
    console.log('path = ' + pathToFile);

    window.resolveLocalFileSystemURL(pathToFile,
        function(fileEntry) {
            fileEntry.file(function (file) {
                var reader = new FileReader();
                reader.onloadend = function (e) {
                    $rootScope.calendar = JSON.parse(this.result);
                    console.log('file opened');
                    console.log(JSON.parse(this.result));
                };
                reader.readAsText(file);
            }, function(error) {});
        },
        function(error) {
            if (error.code == FileError.NOT_FOUND_ERR) {
                $rootScope.calendar = new Year();
                console.log('no file found so it was created');
                $rootScope.saveFile();
            } else {
                console.log(error);
            }
        });
};
And here is the code that saves the file:
$rootScope.saveFile = function() {
    var data = JSON.stringify($rootScope.calendar, null, '\t');
    var fileName = "calendar.txt";

    window.resolveLocalFileSystemURL(cordova.file.dataDirectory,
        function(directoryEntry) {
            directoryEntry.getFile(fileName, { create: true },
                function (fileEntry) {
                    fileEntry.createWriter(
                        function (fileWriter) {
                            var blob = new Blob([data], { type: 'text/plain' });
                            fileWriter.write(blob);
                            console.log('file saved');
                        },
                        function (error) {});
                },
                function (error) {});
        },
        function(error) {
            console.log("Saving Error: Error during finding directory", error.message);
        });
};
I have used this tutorial to get this far: Cordova File Plugin Tutorial
What am I doing wrong?
I assume that you are facing this issue on Android, as I faced the same. As per my understanding and googling, on Android (at least on Android 5) you can't write to the application data directory unless the phone is rooted, so you may have to use externalRootDirectory instead.
e.g.: window.resolveLocalFileSystemURL(cordova.file.externalRootDirectory, successCallback, errorCallback);
Hope it helps.
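A minimal sketch of the save flow, adapted from the saveFile code in the question with only the base directory swapped; this is a sketch, not a definitive fix:

// Same flow as $rootScope.saveFile above, but resolving the external
// root directory instead of the app's private data directory.
window.resolveLocalFileSystemURL(cordova.file.externalRootDirectory,
    function (directoryEntry) {
        directoryEntry.getFile("calendar.txt", { create: true },
            function (fileEntry) {
                fileEntry.createWriter(function (fileWriter) {
                    var blob = new Blob([JSON.stringify($rootScope.calendar)],
                        { type: 'text/plain' });
                    fileWriter.write(blob);
                }, function (error) { console.log(error); });
            },
            function (error) { console.log(error); });
    },
    function (error) { console.log(error); });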
I've got an application based on Node.js and AngularJS and pushed it to OpenShift. But every time I try to upload something, I get the following error:
POST http://www.domain.de/api/upload/file 502 (Bad Gateway)
Angular sends the data like this:
$scope.newFile = function() {
    $scope.id = $scope.group._id;
    var fd = new FormData();
    var file = $scope.files[0];
    fd.append('file', file);

    if (file.type != "application/pdf") {
        mvNotifier.error("Nur PDF Dateien sind akzeptiert.");
        return;
    }

    $http.post('/api/upload/file', fd, {
        transformRequest: angular.identity,
        headers: {'Content-Type': undefined}
    })
    .success(function(d) {
        var data = {
            name: file.name,
            description: $scope.descriptionfile
        };
        mvNotifier.notify("Bis hier hin klappt alles");
        console.log("success on uploading");
        mvFactory.POST(data, mvGroup, {_place: "file", _id: $scope.id}).then(function(data) {
            $scope.newfile = false;
            $scope.group.files.push({name: file.name, description: $scope.descriptionfile});
            mvNotifier.notify("Datei hochgeladen");
        }, function(reason) {
            mvNotifier.error("reason");
        });
    })
    .error(function(data, status, header) {
        mvNotifier.error("Upload hat nicht funktioniert.");
        console.log("data", data);
        console.log("status", status);
        console.log("header", header);
    });
};
And the server routes to a handler that uses busboy to save it:
uploadFile: function(req, res) {
    console.log("req", req.files);

    if (process.env.OPENSHIFT_DATA_DIR != undefined) {
        var cPath = process.env.OPENSHIFT_DATA_DIR;
    } else {
        var cPath = path.resolve('..', 'data');
    }

    var busboy = new Busboy({ headers: req.headers });
    req.pipe(busboy);

    busboy.on('file', function(fieldname, file, filename, encoding, mimetype) {
        var wPath = cPath + '/uploads/documents';
        file.pipe(fs.createWriteStream(wPath + '/' + filename));
        file.on('end', function() {
            console.log('File [' + fieldname + '] Finished');
        });
    });

    busboy.on('finish', function() {
        console.log('Done parsing form!');
    });

    res.status(200).end();
}
On localhost everything works just fine and the data is saved to the server, but on OpenShift I get the Bad Gateway response with this body:
<title>502 Bad Gateway</title>
</head><body>
<h1>Bad Gateway</h1>
<p>The proxy server received an invalid response from an upstream server.<br />
Can someone help me please?
I've resolved the issue. I'm posting it so that someone else who has this problem can solve it too.
The web load balancer at OpenShift is HAProxy, and it had issues with the upload because the server's response wasn't consistent with the request due to the content type. I switched from Angular's $http to a plain XHR and did not set the Content-Type at all; that solved the issue and it now works fine.
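A minimal sketch of what that XHR upload can look like, assuming the same /api/upload/file route and FormData as in the question; the handler bodies are illustrative:

var fd = new FormData();
fd.append('file', file);

var xhr = new XMLHttpRequest();
xhr.open('POST', '/api/upload/file');
// Do not set Content-Type; the browser adds the multipart boundary itself.
xhr.onload = function () {
    if (xhr.status === 200) {
        console.log('upload ok');
    } else {
        console.log('upload failed', xhr.status);
    }
};
xhr.send(fd);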
I am trying to upload files to AWS S3, and it's working fine. But when the upload is running for multiple files, how do I get the progress for each file? Below is my code in AngularJS.
upload: function (file) {
    options = {
        accessKeyId: 'xxxxxxx',
        secretAccessKey: 'xxxxxxxxxxxxxxxxxxxxxxx',
        region: 'xxxxxx'
    };

    var s3 = new AWS.S3(options);
    var params = {
        Bucket: bucketStructure,
        Key: file.name,
        ContentType: file.type,
        Body: file,
        ServerSideEncryption: 'xxxx',
        ACL: 'private'
    };

    s3.putObject(params, function(err, data) {
        if (err) {
            // There was an error with your S3 config.
            alert('AWS Error : ' + err.message);
            return false;
        } else {
            // Success!
            alert('Upload Done');
        }
    })
    .on('httpUploadProgress', function(progress) {
        //console.log(Math.round(progress.loaded / progress.total * 100) + '% done');
    });
}
I am calling the above code, which is in a service function, in a loop. So when the user clicks the form's submit button, I get the two files to upload, and in the loop below I call the above function:
angular.forEach($rootScope.awsfiles, function (file) {
    FileFactory.Upload(file);
});
Now how do I know which file each progress event belongs to? Any better ideas for getting this working? Thanks!