AngularJS: download a private video file from an S3 bucket and play it

I am trying to download a video file from an S3 bucket and show it in my AngularJS app. I am trying to do it with the AWS SDK for Node.js but am not able to get it working. Please help me.
Sample Code
var AWS = require('aws-sdk');
AWS.config.update({
  accessKeyId: ".. your key ..",
  secretAccessKey: ".. your secret key ..",
});
var s3 = new AWS.S3();
s3.getObject(
  { Bucket: "my-bucket", Key: "path/to/videofile" },
  function (error, data) {
    if (error != null) {
      alert("Failed to retrieve an object: " + error);
    } else {
      alert("Loaded " + data.ContentLength + " bytes");
      // do something with data.Body
    }
  }
);
I am able to get the data, but I don't know how to show it; data is an object.

The data you're looking for will be in the Body of the response object you received.
Per AWS SDK documentation, this is what you might expect to see in Body:
Body — (Buffer, Typed Array, Blob, String, ReadableStream) Object data.
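If you are running the SDK in the browser (as the alert calls above suggest), one way to feed that Body to a <video> element is to wrap it in a Blob and an object URL. This is only a sketch; the element id "player" is a made-up example.
// Sketch: turn data.Body (a Typed Array in the browser SDK) into a playable URL.
s3.getObject({ Bucket: "my-bucket", Key: "path/to/videofile" }, function (error, data) {
  if (error) {
    console.error("Failed to retrieve object:", error);
    return;
  }
  var blob = new Blob([data.Body], { type: data.ContentType || "video/mp4" });
  var url = URL.createObjectURL(blob); // temporary local URL for the bytes
  document.getElementById("player").src = url; // hypothetical <video id="player"> element
});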

Got my answer. I need to take the Body element and convert it to base64-encoded data. Code below:
var d = s3.getObject(params, function (error, data) {
  if (error != null) {
    console.log("Failed to retrieve an object: " + error);
  } else {
    console.log(data);
    $scope.learningVideo = "data:video/mp4;base64," + encode(data.Body);
  }
});
And the encode method:
function encode(data) {
  var str = data.reduce(function(a, b) { return a + String.fromCharCode(b) }, '');
  return btoa(str).replace(/.{76}(?=.)/g, '$&\n');
}
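As a side note: if this runs under Node rather than in the browser, data.Body is already a Buffer, so the manual reduce/btoa step can be replaced with Buffer's built-in base64 support. A minimal sketch:
// Sketch: Buffer already knows how to emit base64 in Node.
function encode(data) {
  return Buffer.from(data).toString('base64');
}
// $scope.learningVideo = "data:video/mp4;base64," + encode(data.Body);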

Related

How to retrieve multiple images from Amazon S3 using imgURL at once?

I want to retrieve a list of images from Amazon S3 in one go, based on their image URLs.
Currently I am able to fetch a single image using the following code:
AWS.config.update({
  accessKeyId: accessKeyId,
  secretAccessKey: secretAccessKey
});
AWS.config.region = region;
var bucketInstance = new AWS.S3();
var params = {
  Bucket: bucketName,
  Key: awsImgUrl
}
bucketInstance.getObject(params, function (err, file) {
  if (file) {
    var dataSrc = "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
    callbackSuccess(dataSrc);
  } else {
    callbackSuccess("Error");
  }
});
EncodeData = function (data) {
  var str = data.reduce(function (a, b) { return a + String.fromCharCode(b) }, '');
  return btoa(str).replace(/.{76}(?=.)/g, '$&\n');
}
In my scenario I have multiple S3 image URLs, like awsImgUrl1, awsImgUrl2 .. awsImgUrln.
How can I fetch them in one go instead of one by one?
You cannot get more than one object per API call with S3. You can, however, make multiple calls in parallel.
Using promises this is straightforward.
var bucketInstance = new AWS.S3();
var imageKeys = [awsImgUrl1, awsImgUrl2, awsImgUrl3];
var promisesOfS3Objects = imageKeys.map(function(key) {
  return bucketInstance.getObject({
    Bucket: bucketName,
    Key: key
  }).promise()
    .then(function (file) {
      return "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
    });
});
Promise.all(promisesOfS3Objects)
  .then(callbackSuccess) // callbackSuccess is called with an array of strings
  .catch(function() { callbackSuccess("Error") });
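One usage note: Promise.all rejects the whole batch as soon as a single getObject fails. If partial results are acceptable, Promise.allSettled (available from Node 12.9) is a possible alternative; a sketch:
// Sketch: keep whatever succeeded instead of failing the whole batch.
Promise.allSettled(promisesOfS3Objects)
  .then(function (results) {
    var dataUris = results
      .filter(function (r) { return r.status === 'fulfilled'; })
      .map(function (r) { return r.value; });
    callbackSuccess(dataUris);
  });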
You can also change the way you upload the image data. Instead of uploading a single image per object, upload one document containing the data for multiple images.
const addImageBlock = () => {
  var photoBlock = [
    {
      imageId: 'id',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    },
    {
      imageId: 'id2',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    },
    {
      imageId: 'id3',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    },
    {
      imageId: 'id4',
      type: 'png',
      body: 'data:image/png;base64,iVBORw0K...'
    }
    // ...etc
  ];
  s3.upload({
    Key: photoBlockId + '.json',
    Body: JSON.stringify(photoBlock), // upload the block as a JSON string
    ACL: 'public-read'
  }, function(err, data) {
    if (err) {
      return alert('There was an error: ' + err.message);
    }
  });
}
Then, when you receive this document with a single S3 call, you can loop through it and render the images on the frontend:
getObject(params, function (err, file) {
  var imageArr = [];
  if (file) {
    JSON.parse(file.Body.toString()).map((item) => {
      var image = new Image();
      image.src = item.body;
      imageArr.push(image);
    });
    callbackSuccess(imageArr);
  } else {
    callbackSuccess("Error");
  }
});
The AWS SDK does not have any method to read multiple files at once, and the same goes for the console: you cannot download multiple files in one call.
There is only the GetObject method, which reads a single object from the bucket by key.
So in your case you have to read them one by one by key name, if you already have the key names as a list.
If you want the list of objects first, you can get a summary of the objects in the bucket and then loop over it to download all the files.
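If you don't already have the key names, a possible sketch of that list-then-loop approach using listObjectsV2 (the bucket name and prefix below are placeholders):
// Sketch: list the keys under a prefix, then fetch each object in a loop.
var s3 = new AWS.S3();
s3.listObjectsV2({ Bucket: 'my-bucket', Prefix: 'images/' }, function (err, listing) {
  if (err) { return console.error(err); }
  listing.Contents.forEach(function (entry) {
    s3.getObject({ Bucket: 'my-bucket', Key: entry.Key }, function (err, file) {
      if (err) { return console.error(err); }
      // file.Body holds the bytes for entry.Key
    });
  });
});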

throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or Buffer

I am trying to get the contents of a .json file from a Node.js service into an AngularJS method, but am getting the following error:
_http_outgoing.js:700
throw new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk);
^
TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type object
at ServerResponse.end (_http_outgoing.js:700:13)
Here are the corresponding code fragments.
Angular controller (the commented lines are all of those which I have tried and failed with):
var currentProcess = "process_1cA";
$scope.storestats = [];
var resAss = $resource('/procs/getstorestats');
var stats = resAss.get({
  process: currentProcess,
  date: date.getFullYear() + "" + m + "" + d
});
stats.$promise.then(function(response) {
  if (response != undefined) {
    // var r = JSON.parse(response);
    //$scope.storestats.push(r);
    //$scope.storestats.push(r);
    //var r = JSON.parse(response);
    $scope.storestats.push(response);
    //angular.forEach(r, function(value, key) {
    //  $scope.storestats.push({key : value});
    //});
  }
});
Node.js service:
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      //var r = data.toString('utf8');
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs1);
      res.end(msgs1);
    });
  }
  catch (err) {
    res.end(err.toString());
  }
});
P.S.: The commented-out lines are those which I have tried and failed with. Also, the commented lines in the Node service code snippet give no error, and when logged they show the data correctly, but the data in the response received by the controller is blank.
I'm guessing a bit here, but I think you just need to change res.end() to res.send() in your Node code. The "end" method is used when you are streaming chunks of data and then you call end() when you're all done. The "send" method is for sending a response in one go and letting Node handle the streaming.
Also, be sure you are sending a string back!
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      //var r = data.toString('utf8');
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs1);
      res.send(msgs2); // NOTE THE CHANGE to `msgs2` (the string version)
    });
  }
  catch (err) {
    res.send(err.toString()); // NOTE THE CHANGE
  }
});
I had a similar error. It was because I was passing process.pid to res.end(). It worked when I changed process.pid to a string:
res.end(process.pid.toString());
Figured it out. Two small changes were needed: one in the controller, which was to use $resource.query instead of $resource.get, and one in the service, where, as @jakarella said, I had to pass the stringified version to .end().
Controller:
var resAss = $resource('/procs/getstorestats');
var stats = resAss.query({process: currentProcess, date: date.getFullYear() + "" + m + "" + d});
stats.$promise.then(function (response) {
  $scope.storestats.push(response);
});
Node Service:
httpApp.get('/procs/getstorestats', function(req, res, next) {
  try {
    fs.readFile(cfg.routestatspath + "storestats-" + req.query.process + "-" + req.query.date + ".json", function (err, data) {
      var msgs1 = JSON.parse(data);
      var msgs2 = JSON.stringify(msgs1);
      console.log(msgs2);
      res.end(msgs2);
    });
  }
  catch (err) {
    res.end(err.toString());
  }
});
If you are using the 'request-promise' library, set json: true in the options:
var options = {
  uri: 'https://api.github.com/user/repos',
  qs: {
    access_token: 'xxxxx xxxxx'
  },
  headers: {
    'User-Agent': 'Request-Promise'
  },
  json: true // Automatically parses the JSON string in the response
};
rp(options)
  .then(function (repos) {
  })
  .catch(function (err) {
  });
Thank you user6184932, it works:
try {
  await insertNewDocument(fileNameDB, taskId);
  res.end(process.pid.toString());
} catch (error) {
  console.log("error occurred", error);
  res.send({
    "code": 400,
    "failed": "error occurred"
  });
}
In mysql2 the reason for the error is the sql value: sql must be a query string, e.g.
const sql = 'SELECT * FROM tableName'
pool.executeQuery({
  sql,
  name: 'Error list for given SRC ID',
  values: [],
  errorMsg: 'Error occurred on fetching '
})
  .then(data => {
    res.status(200).json({ data })
  })
  .catch(err => {
    console.log('\n \n == db, icorp fetching error ====> : ', err.message, '\n \n')
  })
I got the error using Node v12 (12.14.1).
Uncaught TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be one of type string or Buffer. Received type number
Sample code for context.
const { Readable } = require('stream')
Readable.from(Buffer.from(base64content, 'base64'))
.pipe( ... )
The solution (for my case) was upgrading to Node v14 (14.17.3), e.g.
nvm use 14
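If upgrading Node is not an option, a possible workaround: on Node 12, Readable.from iterates the Buffer byte by byte, so every chunk is a number. Wrapping the buffer in an array makes it a single Buffer chunk instead (a sketch; base64content is placeholder data):
// Sketch: emit the whole Buffer as one chunk rather than per-byte numbers.
const { Readable } = require('stream');
const base64content = 'aGVsbG8='; // placeholder
Readable.from([Buffer.from(base64content, 'base64')])
  .pipe(process.stdout);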

Gmail API for sending users' messages in Node.js JavaScript fails

My Node.js program fails to send messages using the Gmail API.
The solution from "Gmail API for sending mails in Node.js" does not work for me.
I encode an email with:
var {google} = require('googleapis');
// to and from = "some name <blaw.blaw.com>"
function makeBody(to, from, subject, message) {
  var str = ["Content-Type: text/plain; charset=\"UTF-8\"\r\n",
    "MIME-Version: 1.0\r\n",
    "Content-Transfer-Encoding: 7bit\r\n",
    "to: ", to, "\r\n",
    "from: ", from, "\r\n",
    "subject: ", subject, "\r\n\r\n",
    message
  ].join('');
  encodedMail = new Buffer(str).toString("base64").replace(/\+/g, '-').replace(/\//g, '_');
  return encodedMail;
}
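A small note on that last encoding line: new Buffer(str) is deprecated in current Node releases; Buffer.from(str) produces the same bytes. A sketch of the same encoding step:
// Sketch: same web-safe base64 output without the deprecated Buffer constructor.
encodedMail = Buffer.from(str)
  .toString("base64")
  .replace(/\+/g, '-')
  .replace(/\//g, '_');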
Then I go to the Google API explorer
https://developers.google.com/apis-explorer/#p/
and enter gmail.users.messages.send with the string generated from makeBody above.
An email is successfully sent, so I know the above encoding is OK.
When my program tries to send using the following, it fails with the error
Error: 'raw' RFC822 payload message string or uploading message via
/upload/* URL required
function sendMessage(auth) {
  var gmail = google.gmail('v1');
  var raw = makeBody('john g <asdfasdf#hotmail.com>', 'john g <asfasdgf#gmail.com>', 'test subject', 'test message #2');
  gmail.users.messages.send({
    auth: auth,
    userId: 'me',
    resource: {
      raw: raw
    }
  }, function(err, response) {
    console.log(err || response)
  });
}
The auth token is good since I can call gmail.users.labels.list and I use the same authorization when using the API explorer.
Q1: Does anyone know why the above does not work?
Q2: Gmail API for sending mails in Node.js does not explain why the raw email message is wrapped inside a resource field. I tried simply raw and it did not help.
This fails.
gmail.users.messages.send({
  auth: auth,
  userId: 'me',
  resource: {
    raw: raw
  }
}, function(err, response) {
  console.log(err || response)
});
and so does
gmail.users.messages.send({
  auth: auth,
  userId: 'me',
  raw: raw
}, function(err, response) {
  console.log(err || response)
});
and so does this, from "GMAIL API for sending Email with attachment":
gmail.users.messages.send({
  auth: auth,
  userId: 'me',
  data: raw
}, function(err, response) {
  console.log(err || response)
});
Does anyone know where it's documented how to pass the "request body" the API explorer is asking for?
Q3: Why does the Google API need substitutions in the base64 encoding?
I tried encoding using
const Base64 = require("js-base64").Base64
var encodedMail = Base64.encode(str);
When I feed this into the API explorer, I get the error
"message": "Invalid value for ByteString:
Ohai! For others that stumble here, a few things. First, we have a complete end-to-end sample of sending mail now here:
https://github.com/google/google-api-nodejs-client/blob/master/samples/gmail/send.js
Second, the answer above is mostly right :) Instead of installing the latest version of google-auth-library... just remove it from your package.json altogether. The getting-started guide was very, very wrong (it has since been fixed). googleapis brings in its own compatible version of google-auth-library, so you really don't want to mess with that by installing your own version :)
The quickstart specifies:
npm install google-auth-library@0.* --save
When I changed this to
npm install google-auth-library --save
it pulled in version 1.3.1 vs 0.12.0. Everything started working once I changed the code to account for the breaking changes. The latest version of googleapis also has breaking changes. Here are my tweaks to the quickstart:
package.json
....
"dependencies": {
"google-auth-library": "^1.3.1",
"googleapis": "^26.0.1"
}
quickstart.js
var fs = require('fs');
var readline = require('readline');
var {google} = require('googleapis');
const {GoogleAuth, JWT, OAuth2Client} = require('google-auth-library');

var SCOPES = [
  'https://mail.google.com/',
  'https://www.googleapis.com/auth/gmail.modify',
  'https://www.googleapis.com/auth/gmail.compose',
  'https://www.googleapis.com/auth/gmail.send'
];
var TOKEN_DIR = (process.env.HOME || process.env.HOMEPATH ||
  process.env.USERPROFILE) + '/.credentials/';
var TOKEN_PATH = TOKEN_DIR + 'gmail-nodejs-quickstart.json';
function authorize(credentials, callback) {
  var clientSecret = credentials.installed.client_secret;
  var clientId = credentials.installed.client_id;
  var redirectUrl = credentials.installed.redirect_uris[0];
  var auth = new GoogleAuth();
  var oauth2Client = new OAuth2Client(clientId, clientSecret, redirectUrl);
  // Check if we have previously stored a token.
  fs.readFile(TOKEN_PATH, function (err, token) {
    if (err) {
      getNewToken(oauth2Client, callback);
    } else {
      oauth2Client.credentials = JSON.parse(token);
      callback(oauth2Client);
    }
  });
}
function getNewToken(oauth2Client, callback) {
  var authUrl = oauth2Client.generateAuthUrl({
    access_type: 'offline',
    scope: SCOPES
  });
  console.log('Authorize this app by visiting this url: ', authUrl);
  var rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  rl.question('Enter the code from that page here: ', function (code) {
    rl.close();
    oauth2Client.getToken(code, function (err, token) {
      if (err) {
        console.log('Error while trying to retrieve access token', err);
        return;
      }
      oauth2Client.credentials = token;
      storeToken(token);
      callback(oauth2Client);
    });
  });
}
function makeBody(to, from, subject, message) {
  var str = ["Content-Type: text/plain; charset=\"UTF-8\"\n",
    "MIME-Version: 1.0\n",
    "Content-Transfer-Encoding: 7bit\n",
    "to: ", to, "\n",
    "from: ", from, "\n",
    "subject: ", subject, "\n\n",
    message
  ].join('');
  var encodedMail = new Buffer(str).toString("base64").replace(/\+/g, '-').replace(/\//g, '_');
  return encodedMail;
}
function sendMessage(auth) {
  var gmail = google.gmail('v1');
  var raw = makeBody('xxxxxxxx#hotmail.com', 'xxxxxxx#gmail.com', 'test subject', 'test message');
  gmail.users.messages.send({
    auth: auth,
    userId: 'me',
    resource: {
      raw: raw
    }
  }, function(err, response) {
    console.log(err || response)
  });
}
const secretlocation = 'client_secret.json'

fs.readFile(secretlocation, function processClientSecrets(err, content) {
  if (err) {
    console.log('Error loading client secret file: ' + err);
    return;
  }
  // Authorize a client with the loaded credentials, then call the Gmail API.
  authorize(JSON.parse(content), sendMessage);
});
Now when I run, I get the response
Object {status: 200, statusText: "OK", headers: Object, config: Object, request: ClientRequest, …}

Nodejs S3 Delete Multiple Objects Error

I am trying to bulk delete the S3 objects that are associated with one specific blog record in my database, but I'm hung up on how to pass the array into the params object used by the s3.deleteObjects method. I'm stuck on this error: Check with error message InvalidParameterType: Expected params.Delete.Objects[0].Key to be a string. I feel like it could be related to not having a loop at some point in the process, or maybe the format of the values being pushed into my s3Files array.
Here is my routing:
.delete(function(req, res){
  models.File.findAll({
    where: {
      blogId: blog.blogId
    }
  }).then(function(file){
    var s3Files = [];
    function s3Key(link){
      var parsedUrl = url.parse(link);
      var fileName = parsedUrl.path.substring(1);
      return fileName;
    }
    for(var k in file){
      console.log('Here are each files ' + file[k].fileName);
      s3Files.push(s3Key(file[k].fileName));
    }
    console.log('Here are the s3Files ' + s3Files);
    //GOTTEN TO THIS POINT WITHOUT AN ERROR
    aws.config.update({accessKeyId: process.env.AWS_ACCESS_KEY, secretAccessKey: process.env.AWS_SECRET_KEY, region: process.env.AWS_REGION});
    //var awsKeyPath = s3Key(file.fileName);
    var s3 = new aws.S3();
    var options = {
      Bucket: process.env.AWS_BUCKET,
      Delete: {
        Objects: [{
          Key: s3Files
        }],
      },
    };
    s3.deleteObjects(options, function(err, data){
      if(data){
        console.log("File successfully deleted");
      } else {
        console.log("Check with error message " + err);
      }
    });
  });
Here is the output from console.log('Here are each files ' + file[k].fileName);:
Here are each files https://local-bucket.s3.amazonaws.com/1/2017-02-12/screen_shot_2017-02-01_at_8_25_03_pm.png
Here are each files https://local-bucket.s3.amazonaws.com/1/2017-02-13/test.xlsx
Here are each files https://local-bucket.s3.amazonaws.com/1/2017-02-13/screen-shot-2017-02-08-at-8.23.37-pm.png
Here is the output from console.log('Here are the s3Files ' + s3Files);:
Here are the s3Files 1/2017-02-12/screen_shot_2017-02-01_at_8_25_03_pm.png,1/2017-02-13/test.xlsx,1/2017-02-13/screen-shot-2017-02-08-at-8.23.37-pm.png
Here is the error message:
Check with error message InvalidParameterType: Expected params.Delete.Objects[0].Key to be a string
Key should be a string. You should assign an array of objects to Objects.
Use this code:
var objects = [];
for(var k in file){
  objects.push({Key : file[k].fileName});
}
var options = {
  Bucket: process.env.AWS_BUCKET,
  Delete: {
    Objects: objects
  }
};
Build your array as an array of objects, each with a Key property:
const objects = [
  {Key: 'image1.jpg'},
  {Key: 'image2.jpg'}
]
Add a new item to the list:
for(var k in file){
  objects.push({Key : file[k].fileName});
}
Set the array as the Objects value in the parameters:
const options = {
  Bucket: process.env.BUCKET,
  Delete: {
    Objects: objects,
    Quiet: false
  }
};
Now delete the objects:
s3.deleteObjects(options, function(err, data) {
  if (err) console.log(err, err.stack); // an error occurred
  else console.log(data); // successful response
});
Learn more from official docs

Upload a dynamically generated PDF from AWS Lambda to AWS S3

In my serverless app, I want to create a PDF that is generated dynamically and then upload it to AWS S3. My problem is that when a URL is returned to the client-side code from the server, the uploaded URL doesn't work. My code is given below:
Client-side JavaScript code (AngularJS)
$scope.downloadAsPDF = function() {
  // first I have to send all the html data to the server
  var html = angular.element('html').html(); // get all page data
  var service = API.getService();
  service.downloadPdf({}, { html : html }, // api call with html data
    function(res) {
      console.log("res : ", res);
      window.open(res.url); // open uploaded pdf file
      // err: The server replies that you don't have permissions to download this file
      // HTTP/1.1 403 Forbidden
    }, function(err) {
      console.log("err : ", err);
    });
};
Serverless Code
var fs = require('fs');
var pdf = require('html-pdf');
var AWS = require("aws-sdk");
var s3 = new AWS.S3();

module.exports.handler = function(event, context) {
  if (event.html) { // client html data
    AWS.config.update({
      accessKeyId: 'xxxxxxxxxxxxxxxxx',
      secretAccessKey: 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx',
      region: 'xxxxxxxxxxxxxxxxxxxx'
    });
    var awsInfo = {
      bucket: 'xxxxx-xxxxxx'
    };
    var baseUrl = 'https://s3-my-region.amazonaws.com/s3-upload-directory';
    var folderRoot = 'development/pdf';
    // unique file name
    var output_filename = Math.random().toString(36).slice(2) + '.pdf';
    // file created directory
    var output = '/tmp/' + output_filename;
    pdf.create(event.html, options).toStream(function(err, stream) {
      if (err) {
        console.log('pdf err : ', err);
      } else {
        writeStream = fs.createWriteStream(output);
        s3.putObject({
          Bucket : awsInfo.bucket,
          Key : folderRoot + '/' + output_filename,
          Body : fs.createReadStream(output),
          ContentType : "application/pdf"
        },
        function(error, data) {
          if (error != null) {
            console.log("error: " + error);
          } else {
            // upload data: { ETag: '"d41d8cd98f00b204e9800998ecf8427e"' }
            console.log('upload data : ', data);
            return cb(null, {
              // return actual aws link, but no file
              // ex: 'https://s3-my-region.amazonaws.com/s3-upload-directory/output_filename.pdf'
              url: baseUrl + '/' + output_filename
            });
          }
        });
      }
    });
  }
};
I've solved my problem. I was trying to upload the PDF before I had generated it. I solved it using the following code:
pdf.create(event.html, options).toStream(function(err, stream) {
  if (err) {
    console.log('pdf err : ', err);
  } else {
    var stream = stream.pipe(fs.createWriteStream(output));
    stream.on('finish', function () {
      s3.putObject({
        Bucket : awsInfo.bucket,
        Key : folderRoot + '/' + output_filename,
        Body : fs.createReadStream(output),
        ContentType : "application/pdf"
      },
      function(error, data) {
        if (error != null) {
          console.log("error: " + error);
          return cb(null, {
            err: error
          });
        } else {
          var url = baseUrl + '/' + output_filename;
          return cb(null, {
            url: url
          });
        }
      });
    });
  }
});
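A possible simplification, assuming your version of html-pdf exposes toBuffer: generate the PDF in memory and hand the Buffer straight to putObject, skipping the /tmp round trip (cb is the same callback used above):
// Sketch: build the PDF in memory and upload the Buffer directly.
pdf.create(event.html, options).toBuffer(function (err, buffer) {
  if (err) { return cb(err); }
  s3.putObject({
    Bucket: awsInfo.bucket,
    Key: folderRoot + '/' + output_filename,
    Body: buffer,
    ContentType: 'application/pdf'
  }, function (error) {
    if (error) { return cb(error); }
    cb(null, { url: baseUrl + '/' + output_filename });
  });
});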
I have done a similar kind of thing before. I want a few clarifications from you, and then I will be able to help you better.
1) In your code (server side), you have mentioned in the callback function that the actual AWS link is getting returned. Are you sure your file is getting uploaded to Amazon S3? I mean, did you check your bucket for the file or not?
2) Have you set any custom bucket policy on Amazon S3? Bucket policies play an important role in what can be downloaded from S3.
3) Did you check the logs to see exactly which part of the code is causing the error?
Please provide me this information and I think I should be able to help you.
If you don't want to upload to S3, just return the generated file from AWS Lambda.
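A sketch of that alternative, returning the generated PDF as base64 straight from the handler (the response shape here is just an example):
// Sketch: skip S3 and return the PDF bytes in the Lambda response.
pdf.create(event.html, options).toBuffer(function (err, buffer) {
  if (err) { return cb(err); }
  cb(null, {
    contentType: 'application/pdf',
    base64: buffer.toString('base64') // client can turn this into a Blob or data URI
  });
});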
