I have a MEAN.js application in which I use https://github.com/nervgh/angular-file-upload to handle angular file uploads to server. What I want to do is receive the files on the server, and then pipe them to another server where I would like to use a writeStream to write them.
Is there a way I can do this?
// Multipart payload for needle: the uploaded file's in-memory buffer plus
// routing metadata consumed by the receiving server (req.body.options.*).
// FIX: the original read `ar data` — the `v` of `var` was dropped, which is
// a syntax error.
var data = {
  file: {
    buffer: req.files.file.buffer,
    filename: req.files.file.name,
    content_type: 'application/octet-stream'
  },
  options: {
    operation: 'Content',
    source: req.body.source
  }
};
needle.post(config.vdnURL,data,{ multipart:true},function(error, response) {
on the other server which is supposed to receive the file and write it to disk, I do the following
// Ensure the target directory exists, then persist the uploaded buffer and
// answer with the file's relative URL.
// FIX: the original rebuilt the same path string four times; compute the
// relative path once so the mkdir, the write and the response URL can never
// drift out of sync.
var relativeDir = req.body.options.source + '/' + req.body.options.operation + '/';
var targetDir = config.winUploadPath + relativeDir;
mkdirp(targetDir, function (err) {
  if (err) {
    console.log(err);
    return res.status(400).send({
      message: errorHandler.getErrorMessage(err)
    });
  }
  // NOTE(review): writeFile buffers the whole upload in memory; for the
  // multi-MB failures described above, a streaming write would be safer.
  fs.writeFile(targetDir + req.files.file.name, req.files.file.buffer, function (uploadError) {
    if (uploadError) {
      console.log('Got upload error ', uploadError);
      return res.status(400).send({
        message: 'Error occurred while uploading profile picture'
      });
    }
    res.status(200).send({
      message: 'Upload successful',
      url: relativeDir + req.files.file.name
    });
  });
});
The problem arises when the file size exceeds a couple of MB's (>3-4MB). Files don't get written at all and the second server shows
POST / -- ms -- with no file written.
Hence I thought it's a streaming problem. Any help on how I can solve it will be greatly appreciated.
Related
I am working with one application in which I am creating offline PDF and save them in file system.
Now the problem is that when I delete a particular record, I also need to delete its PDF from the file system. I went through the file plugin but couldn't find any method related to that. I am using Ionic 4; here are some pieces of code.
// Platform branch: on a Cordova device, write the generated PDF to external
// storage and open it with the OS viewer; otherwise fall back to download().
if (this.plt.is('cordova')) {
// getBuffer() hands us the raw PDF bytes asynchronously.
this.pdfObj.getBuffer((buffer) => {
const blob = new Blob([buffer], { type: 'application/pdf' });
// Save the PDF to the data Directory of our App
// NOTE(review): writes to externalRootDirectory + '/Downloads/' — assumes
// that directory exists and is writable; confirm on target devices.
this.file.writeFile(this.file.externalRootDirectory + '/Downloads/', 'ACCSYS-' +
this.randomString(4) + '-' + encodeURI(this.headerData.title) + '.pdf', blob, { replace: true }).then(fileEntry => {
// Open the PDf with the correct OS tools
// The 1s delay presumably lets the write settle before opening — TODO confirm.
setTimeout(() => {
this.hideLoader();
this.fileOpener.open(fileEntry.nativeURL, 'application/pdf');
this.pdfObj = null;
}, 1000);
});
});
} else {
// Browser fallback: let the PDF library trigger a normal download.
setTimeout(() => {
this.hideLoader();
this.pdfObj.download();
this.pdfObj = null;
}, 500);
}
Assume I store the nativeURL in localstorage.
Any idea how to delete the file?
If you already have a fileEntry object you can use the remove() method to delete the file like this:
// FileEntry.remove() takes exactly two callbacks: success and error.
// FIX: the original passed a third "file does not exist" callback, which is
// not part of the cordova-plugin-file API — a missing file is reported
// through the error callback (FileError.NOT_FOUND_ERR).
fileEntry.remove(function() {
  // the file has been successfully removed
}, function(error) {
  // there was an error removing the file (including NOT_FOUND_ERR)
});
See these links for more documentation and examples.
here is the perfect way to do it ( define window on the top of ts file )
// Resolves the stored file URL and deletes the underlying file.
delete() {
// this.fileHelper.removeFile();
const fileToRemove = this.remoteURL; // Change this with your file path
// resolveLocalFileSystemURL yields an Entry for the URL (named dirEntry
// here, though for a file URL it would actually be a FileEntry).
window.resolveLocalFileSystemURL( fileToRemove, (dirEntry) => {
// NOTE(review): unbound method references lose `this`; safe here only
// because the handlers never touch instance state.
dirEntry.remove(this.successHandler, this.errorHandler);
});
}
// Success callback for remove(); logging only.
successHandler() {
console.log('Directory deleted successfully');
}
// Error callback for remove(); logging only. (The missing trailing
// semicolon below is tolerated by automatic semicolon insertion.)
errorHandler() {
console.log('There is some error while deleting directory')
}
I am trying to get the contents of a .json file using a node js service into an angularjs method. But am getting following error:
_http_outgoing.js:700
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type object
at ServerResponse.end (_http_outgoing.js:700:13)
here are the corresponding code fragments...
angular controller: the commented lines are all of those which i have tried and failed with.
// Process identifier used to select which stats file the service reads.
var currentProcess = "process_1cA";
$scope.storestats = [];
// $resource endpoint served by the Node service shown below.
var resAss = $resource('/procs/getstorestats');
// NOTE(review): `date`, `m` and `d` are defined outside this snippet; the
// date parameter is built by string concatenation — TODO confirm format.
var stats = resAss.get({
process: currentProcess,
date: date.getFullYear() + "" + m + "" + d
});
stats.$promise.then(function(response) {
if (response != undefined) {
// var r = JSON.parse(response);
//$scope.storestats.push(r);
//$scope.storestats.push(r);
//var r = JSON.parse(response);
$scope.storestats.push(response);
//angular.forEach(r, function(value, key) {
// $scope.storestats.push({key : value});
//});
}
});
NODEJs service:
// Reads a per-process, per-date JSON stats file and returns it to the client.
// (Question code — this snippet intentionally shows the bug discussed below.)
httpApp.get('/procs/getstorestats', function(req, res, next) {
try {
fs.readFile(cfg.routestatspath + "storestats-"+req.query.process + "-" + req.query.date + ".json", function (err, data) {
// BUG: `err` is never checked; on failure `data` is undefined and
// JSON.parse throws inside this async callback, beyond the try/catch.
var msgs1 = JSON.parse(data);
//var r = data.toString('utf8');
var msgs2 = JSON.stringify(msgs1);
console.log(msgs1);
// BUG: msgs1 is a parsed object; res.end() accepts only a string or
// Buffer — this line raises the ERR_INVALID_ARG_TYPE shown above.
res.end(msgs1);
});
}
catch (err) {
// Only synchronous errors (e.g. building the path) are caught here.
res.end(err.toString());
}});
P.S.: The commented-out lines are those I have tried and that failed. Also, the commented lines in the Node service code snippet give no error, and when logged they show the data correctly, but the data in the controller's response is blank.
I'm guessing a bit here, but I think you just need to change res.end() to res.send() in your Node code. The "end" method is used when you are streaming chunks of data and then you call end() when you're all done. The "send" method is for sending a response in one go and letting Node handle the streaming.
Also, be sure you are sending a string back!
// Returns the per-process, per-date stats file as a JSON string.
// FIX: guard the async `err` from readFile — the outer try/catch cannot see
// exceptions thrown inside the callback, and `data` is undefined on failure.
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-"+req.query.process + "-" + req.query.date + ".json", function (err, data) {
      if (err) {
        return res.send(err.toString());
      }
      var msgs1 = JSON.parse(data);
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs1);
      res.send(msgs2); // NOTE THE CHANGE to `msgs2` (the string version)
    });
  }
  catch (err) {
    // Only synchronous failures (e.g. a bad path expression) land here.
    res.send(err.toString()); // NOTE THE CHANGE
  }
});
I had a similar error. It was because I was passing process.pid to res.end(). It worked when I changed process.pid to string
res.end(process.pid.toString());
Figured it out. Two small changes were needed: one in the controller, which was to use "$resource.query" instead of "$resource.get", and one in the service, as #jakarella said, to use the stringified value in .end().
Controller:
// Controller side of the fix: query() returns an array resource, matching
// the JSON file's structure.
var resAss = $resource('/procs/getstorestats');
var stats = resAss.query({process: currentProcess, date: date.getFullYear() + "" + m + "" + d});
stats.$promise.then(function (response) {
  $scope.storestats.push(response);
}); // FIX: the original paste never closed the .then() call.
Node Service:
// Service side of the fix: res.end() now receives the stringified JSON.
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-"+req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs2);
      res.end(msgs2);
    });
  }
  catch (err) {
    // FIX: a `try` without catch/finally is a syntax error; the original
    // paste ended before the catch block.
    res.end(err.toString());
  }
});
If you are using 'request-promise' library set the json
// request-promise call: `json: true` makes rp() parse the response body, so
// the .then() callback receives an object rather than a raw string.
var options = {
uri: 'https://api.github.com/user/repos',
qs: {
access_token: 'xxxxx xxxxx'
},
headers: {
'User-Agent': 'Request-Promise'
},
json: true // Automatically parses the JSON string in the response
};
rp(options)
.then(function (repos) {
})
.catch(function (err) {
// NOTE(review): empty catch — failures are silently swallowed here; log
// or rethrow in real code.
});
Thank you user6184932, it works:
// NOTE(review): `await` is only valid inside an async function — this
// fragment assumes an async enclosing scope.
try {
await insertNewDocument(fileNameDB, taskId);
// pid is a number; end() needs a string or Buffer.
res.end(process.pid.toString());
} catch (error) {
console.log("error ocurred", error);
// res.send() with an object serializes it to JSON (presumably Express).
res.send({
"code": 400,
"failed": "error ocurred"
})
}
In mysql2, the reason for the error is the `sql` shorthand property — `sql` must be a query string:
// FIX: the query text must be a string — the unquoted original is invalid JS.
const sql = 'select * from tableName';
pool.executeQuery({
  sql,
  name: 'Error list for given SRC ID',
  values: [],
  errorMsg: 'Error occurred on fetching '
})
  .then(data => {
    res.status(200).json({ data })
  })
  .catch(err => {
    console.log('\n \n == db , icorp fetching erro ====> : ', err.message, '\n \n')
  })
I got the error using Node v12 (12.14.1).
Uncaught TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type number
Sample code for context.
const { Readable } = require('stream')
// NOTE(review): iterating a Buffer yields integers (one per byte), so on
// some Node versions Readable.from(buffer) emits number chunks — consistent
// with the ERR_INVALID_ARG_TYPE "Received type number" above. `.pipe( ... )`
// is a placeholder, not runnable code; `base64content` is defined elsewhere.
Readable.from(Buffer.from(base64content, 'base64'))
.pipe( ... )
Solution (for my case), was upgrading to Node v14 (14.17.3). e.g.
nvm use 14
nvm
In my serverless app, I want to create pdf which is generated dynamically and then upload that created pdf into aws s3. My problem is, when a url is returned to client-side code from server, uploaded url doesn't working. My code is given below:
Client-side javascript code (angular.js)
// Sends the page's full HTML to the server, which generates a PDF, uploads
// it, and replies with a URL that is then opened in a new window.
$scope.downloadAsPDF = function() {
// First send all of the page's HTML data to the server.
var html = angular.element('html').html(); // get all page data
var service = API.getService();
service.downloadPdf({}, { html : html }, // api call with html data
function(res) {
console.log("res : ", res);
window.open(res.url); // open uploaded pdf file
// err: The server replies that you don't have permissions to download this file
// HTTP/1.1 403 Forbidden
}, function(err) {
console.log("err : ", err);
});
};
Serverless Code
var fs = require('fs');
var pdf = require('html-pdf');
var AWS = require("aws-sdk");
var s3 = new AWS.S3();
// Lambda handler: renders client-supplied HTML to a PDF in /tmp, uploads it
// to S3 and is meant to return the public URL.
// (Question code — the bugs flagged below are what the answers resolve.)
module.exports.handler = function(event, context) {
if (event.html) { // client html data
AWS.config.update({
accessKeyId: 'xxxxxxxxxxxxxxxxx',
secretAccessKey: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
region: 'xxxxxxxxxxxxxxxxxxxx'
});
var awsInfo = {
bucket: 'xxxxx-xxxxxx'
};
var baseUrl = 'https://s3-my-region.amazonaws.com/s3-upload-directory';
var folderRoot = 'development/pdf';
// unique file name
var output_filename = Math.random().toString(36).slice(2) + '.pdf';
// file created directory
var output = '/tmp/' + output_filename;
// BUG: `options` is never defined anywhere in this handler.
pdf.create(event.html, options).toStream(function(err, stream) {
if( err ) {
console.log('pdf err : ', err);
} else {
// BUG: implicit global, and the PDF stream is never piped into it —
// nothing is ever written to `output`.
writeStream =fs.createWriteStream(output);
// BUG: putObject reads /tmp before the PDF has been written, so an
// empty object is uploaded (the ETag logged below looks like the MD5
// of an empty body).
s3.putObject({
Bucket : awsInfo.bucket,
Key : folderRoot + '/' + output_filename,
Body : fs.createReadStream(output),
ContentType : "application/pdf"
},
function(error, data) {
if (error != null) {
console.log("error: " + error);
} else {
// upload data: { ETag: '"d41d8cd98f00b204e9800998ecf8427e"' }
console.log('upload data : ', data);
// BUG: `cb` is not defined — the handler signature is (event, context).
return cb(null, {
// return actual aws link, but no file
// ex: 'https://s3-my-region.amazonaws.com/s3-upload-directory/output_filename.pdf
url: baseUrl + '/' + output_filename
});
}
});
}
});
}
};
I've solve my problem. I was trying to upload pdf before I generate pdf. I have solve this problem using the following code:
// Render the HTML to a PDF stream, persist it to disk, and only upload to S3
// once the file write has fully finished — then report the result via cb().
pdf.create(event.html, options).toStream(function (err, pdfStream) {
  if (err) {
    console.log('pdf err : ', err);
    return;
  }
  // Write the generated PDF to /tmp first; 'finish' guarantees the bytes
  // are on disk before we start reading them back for the upload.
  var fileWriter = pdfStream.pipe(fs.createWriteStream(output));
  fileWriter.on('finish', function () {
    var uploadParams = {
      Bucket: awsInfo.bucket,
      Key: folderRoot + '/' + output_filename,
      Body: fs.createReadStream(output),
      ContentType: "application/pdf"
    };
    s3.putObject(uploadParams, function (error, data) {
      if (error != null) {
        console.log("error: " + error);
        return cb(null, {
          err: error
        });
      }
      // Success: hand the public URL back to the caller.
      return cb(null, {
        url: baseUrl + '/' + output_filename
      });
    });
  });
});
I have done similar kind of thing before. I want a few clarifications from you and then I will be able to help you better.
1) In your code (server side), you have mentioned in the callback function that actual aws link is getting returned.
Are you sure that your file is getting uploaded to Amazon s3. I mean did you check your bucket for the file or not?
2) Have you set any custom bucket policy on Amazon s3. Bucket policy play an important role in what can be downloaded from S3.
3) Did you check the logs to see exactly which part of code is causing the error?
Please provide me this information and I think the I should be able to help you.
If we don't want to upload to S3, we can just return the generated file directly from AWS Lambda.
I am using streaming-s3 node-modules, which is working fine on my local machine.
But on the live server it doesn't seem to work. I have HTTPS enabled on the live server.
If I disable HTTPS on the live server, my uploading works fine.
Here is my code
// Streams a file to S3 with the `streaming-s3` module, reporting the outcome
// through cb(err, response).
// NOTE(review): the function's closing `};` is not shown in this paste.
exports.uploadFile = function (fileReadStream, awsHeader, cb) {
//set options for the streaming module
var options = {
concurrentParts: 2,
waitTime: 20000,
retries: 2,
maxPartSize: 10 * 1024 * 1024
};
//call stream function to upload the file to s3
var uploader = new streamingS3(fileReadStream, config.aws.accessKey, config.aws.secretKey, awsHeader, options);
//start uploading
uploader.begin();// important if callback not provided.
// handle these functions
uploader.on('data', function (bytesRead) {
console.log(bytesRead, ' bytes read.');
});
uploader.on('part', function (number) {
console.log('Part ', number, ' uploaded.');
});
// All parts uploaded, but upload not yet acknowledged.
uploader.on('uploaded', function (stats) {
console.log('Upload stats: ', stats);
});
// Upload acknowledged: hand the S3 response to the caller.
uploader.on('finished', function (response, stats) {
console.log(response);
logger.log('info', "UPLOAD ", response);
cb(null, response);
});
uploader.on('error', function (err) {
console.log('Upload error: ', err);
logger.log('error', "UPLOAD Error: ", err);
cb(err);
});
Any idea about this?
Thanks
You need to enable ssl in your client, add sslEnabled option like following
// streaming-s3 client options: TLS enabled, two parts in flight, a 20-second
// acknowledgement wait, two retries per part, and a 10 MiB part-size cap.
var options = {
  sslEnabled: true,
  concurrentParts: 2,
  waitTime: 20 * 1000,
  retries: 2,
  maxPartSize: 10485760
};
you can check more about the options you can use http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html
I am developing an application for a radio station, have a section to download your audio programs. Works perfectly on Android and iOS, but Windows Phone starts downloading (very very slow) and after a time the application breaks showing this error message
An exception of type 'System.Runtime.InteropServices.SEHException' occurred in Unknown Module. and wasn't handled before a managed/native boundary
Also during the short time that is downloading shows this message
DispatchFileTransferProgress : FileTransfer1024600849
The size of the files you download is between 15 and 20 mb.
Anyone know what happens? Thanks in advance
Edited for add code
Code:
// Downloads record.get('file') into the persistent 'Radio' directory using
// the Cordova File and FileTransfer plugins, logging progress along the way.
window.requestFileSystem(
LocalFileSystem.PERSISTENT,
0,
function onRequestFileSystemSuccess(fileSystem) {
fileSystem.root.getDirectory(
'Radio', // Directory is created when the app starts the first time
{ create: true, exclusive: false },
function(directoryEntry) {
// Create a throw-away file only to learn the directory's native path,
// then delete it immediately.
directoryEntry.getFile(
'dummy.html',
{ create: true, exclusive: false },
function onGetFileSuccess(fileEntry) {
var path = fileEntry.toURL().replace('dummy.html', '');
var fileTransfer = new FileTransfer();
fileEntry.remove();
var url = record.get('file');
var file = record.get('file').split('/');
fileTransfer.onprogress = function(progressEvent) {
if (progressEvent.lengthComputable) {
console.log('Downloaded: ' + ((progressEvent.loaded / progressEvent.total) * 100));
}
};
// Save under the remote file's own basename.
fileTransfer.download(
url,
path + file[file.length - 1],
function(file) {
console.log('Downloaded!');
},
function(error) {
console.log("download error source " + error.source);
console.log("download error target " + error.target);
// NOTE(review): this is a download — the "upload" label is misleading.
console.log("upload error code" + error.code);
}
);
},
function fail(error) {
console.log('Get file ' + error.code);
}
);
},
function fail(error) {
console.log('Get directory ' + error.code);
}
);
},
function fail(error) {
console.log('Get file system ' + error.code);
}
);
Miguel