How to retrieve multiple images from Amazon S3 using imgURL at once? - angularjs

I want to retrieve a list of images from Amazon S3 in one go, based on their image URLs.
Currently I am able to fetch a single image using the following code:
AWS.config.update({
accessKeyId: accessKeyId,
secretAccessKey: secretAccessKey
});
AWS.config.region = region;
var bucketInstance = new AWS.S3();
var params = {
Bucket: bucketName,
Key: awsImgUrl
}
bucketInstance.getObject(params, function (err, file) {
if (file) {
var dataSrc = "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
callbackSuccess(dataSrc);
} else {
callbackSuccess("Error");
}
});
EncodeData = function (data) {
var str = data.reduce(function (a, b) { return a + String.fromCharCode(b) }, '');
return btoa(str).replace(/.{76}(?=.)/g, '$&\n');
}
In my scenario I have multiple S3 image URLs, like awsImgUrl1, awsImgUrl2, ..., awsImgUrln.
How can I fetch them in one go instead of one by one?

You cannot get more than one image per API call with S3. You can, however, make multiple calls in parallel.
Using promises, this is straightforward:
var bucketInstance = new AWS.S3();
var imageKeys = [ awsImgUrl1, awsImgUrl2, awsImgUrl3];
var promisesOfS3Objects = imageKeys.map(function(key) {
return bucketInstance.getObject({
Bucket: bucketName,
Key: key
}).promise()
.then(function (file) {
return "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
})
})
Promise.all(promisesOfS3Objects)
.then(callbackSuccess) // callbackSuccess is called with an array of strings
.catch(function() { callbackSuccess("Error") })

You can change the way you upload the image data. Instead of uploading a single image, upload one document containing the data for multiple images.
const addImageBlock = () => {
var photoBlock = [
{
imageId: 'id',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
},
{
imageId: 'id2',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
},
{
imageId: 'id3',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
},
{
imageId: 'id4',
type: 'png',
body: 'data:image/png;base64,iVBORw0K...'
}
//...etc
];
s3.upload({
Bucket: bucketName, // assumes the bucket name from the question
Key: photoBlockId + '.json',
Body: JSON.stringify(photoBlock), // serialize the block so it is stored as JSON
ACL: 'public-read'
}, function(err, data) {
if (err) {
return alert('There was an error: ' + err.message);
}
});
}
Then, when you receive this data with one S3 call, you can loop through it and render the images on the frontend:
getObject(params, function (err, file) {
var imageArr = [];
if (file) {
JSON.parse(file.Body.toString()).forEach((imageData) => {
var image = new Image();
image.src = imageData.body;
imageArr.push(image);
});
callbackSuccess(imageArr);
}
else {
callbackSuccess("Error");
}
});

The AWS SDK does not have a method to read multiple files at once, and the same goes for the console: you cannot download multiple files in a single request.
The SDK only has the GetObject method, which reads one object from a bucket by its key.
So in your case, if you already have the key names as a list, you have to read them one by one.
If you first want the list of objects, you can list the objects in the bucket and then loop over the result to download all the files, as sketched below.
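For illustration, here is a minimal sketch of that list-then-loop approach, reusing bucketInstance, bucketName, EncodeData and callbackSuccess from the question; the "images/" prefix is a hypothetical placeholder for wherever your images live, and pagination (ContinuationToken) is ignored:
bucketInstance.listObjectsV2({ Bucket: bucketName, Prefix: "images/" }, function (err, listing) {
  if (err) { return callbackSuccess("Error"); }
  // Collect the keys returned by the listing call.
  var keys = listing.Contents.map(function (obj) { return obj.Key; });
  // Download each object in parallel, as in the promise-based answer above.
  Promise.all(keys.map(function (key) {
    return bucketInstance.getObject({ Bucket: bucketName, Key: key }).promise()
      .then(function (file) {
        return "data:" + file.ContentType + ";base64," + EncodeData(file.Body);
      });
  }))
  .then(callbackSuccess)
  .catch(function () { callbackSuccess("Error"); });
});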

Related

Nativescript Class constructor Observable cannot be invoked without 'new'

I'm trying to upload a multipart form in NativeScript and I'm using nativescript-background-http. I keep getting the error Class constructor Observable cannot be invoked without 'new'. I've tried changing the compilerOptions target to es5 and es2017, but nothing changed.
Here's all my code from the component.
onSave(){
console.log("clicked")
this.proccessImageUpload(this.file);
}
public onSelectSingleTap() {
this.isSingleMode = true;
let context = imagepicker.create({
mode: "single"
});
this.startSelection(context);
}
private startSelection(context) {
let that = this;
context
.authorize()
.then(() => {
that.imageAssets = [];
that.imageSrc = null;
return context.present();
})
.then((selection) => {
console.log("Selection done: " + JSON.stringify(selection));
this.file = selection[0]._android;
that.imageSrc = that.isSingleMode && selection.length > 0 ? selection[0] : null;
// set the images to be loaded from the assets with optimal sizes (optimize memory usage)
selection.forEach(function (element) {
element.options.width = that.isSingleMode ? that.previewSize : that.thumbSize;
element.options.height = that.isSingleMode ? that.previewSize : that.thumbSize;
});
that.imageAssets = selection;
}).catch(function (e) {
console.log(e);
});
}
// proccess image function
proccessImageUpload(fileUri) {
var backgroundHttp = require("nativescript-background-http");
return new Promise((resolve, reject) => {
// body...
var request = {
url: 'http://192.168.0.2:4000/api/posts',
method: "POST",
headers: {
"Content-Type": "application/octet-stream",
"user_id": "<user_id>"
},
description: 'Uploading profile image..',
androidAutoDeleteAfterUpload: false,
androidNotificationTitle: 'Profile image'
}
var params = [
{ name: "title", value: "test" },
{ name: "content", value: "test" },
{ name: "fileToUpload", filename: fileUri, mimeType: "image/jpeg" }
];
var backgroundSession = backgroundHttp.session('image-upload');
var task = backgroundSession.uploadFile(fileUri, request);
task.on("progress", (e) => {
// console log data
console.log(`uploading... ${e.currentBytes} / ${e.totalBytes}`);
});
task.on("error", (e) => {
// console log data
console.log(`Error processing upload ${e.responseCode} code.`);
reject(`Error uploading image!`);
});
task.on("responded", (e) => {
// console log data
console.log(`received ${e.responseCode} code. Server sent: ${e.data}`);
// var uploaded_response = JSON.parse(e.data);
});
task.on("complete", (e) => {
// console log data
console.log(`upload complete!`);
console.log(`received ${e.responseCode} code`);
// console.log(e.data);
})
resolve(task);
});
}
I know the issue is coming from this line.
var task = backgroundSession.uploadFile(fileUri, request);
Any help would be greatly appreciated!
You are using an old version of the nativescript-background-http plugin.
You have to install the latest version:
tns plugin add @nativescript/background-http
I was able to get this working by installing tns version 6.
I had exactly the same problem. I got this from slack.com, courtesy of Chris Vietor:
"tns plugin add nativescript-background-http" works with NativeScript 6.
"tns plugin add @nativescript/background-http" works with NativeScript 7.

Trying to get SalesForce to recognize an Attachment as a PDF

I am able to use sObject to put an Attachment onto one of my records. The problem is that SF is not recognizing the file as a PDF but as a generic file.
const base64data = Buffer.from(pdfBuffer).toString('base64');
try {
await conn.sobject('Attachment').create({
ParentId: filename,
Name: resumeFileName,
Body: base64data,
ContentType: fileType,
Description: 'Resume Attachment',
});
} catch (e) {
console.log('Attachment Error', e);
}
When I look at the attachments on my record, the file does not have all of the options that a PDF file has (only download and delete).
Thanks in advance!
It turns out that in order for Salesforce to recognize the PDF correctly, you need to have the content type set to application/pdf AND the name of the file must include the .pdf extension. This worked for me:
(async () => {
const jsforce = require('jsforce');
const fs = require('fs');
var conn = new jsforce.Connection({
instanceUrl : '...',
accessToken : '...'
});
const pdfData = fs.readFileSync('./test.pdf').toString('base64');
try {
await conn.sobject('Attachment').create({
ParentId: '0012300000RWedX',
Name: 'My Test PDF.pdf', // <= Turns out the name has to have .pdf
Body: pdfData,
ContentType: 'application/pdf',
Description: 'Testing PDF Attachment',
});
} catch(err) {
console.error(err);
}
})();

Serving PDF content back to browser via Node Express using pdfMake

I am making use of the pdfmake library for generating PDF documents in my node express application and want these to be sent straight back to the client to trigger the browser to automatically download the file.
As a reference point I have been using the following examples for my express middleware:
https://gist.github.com/w33ble/38c5e0220d491148de1c
https://github.com/bpampuch/pdfmake/issues/489
I have opted for sending a buffered response back, so the key part of my middleware looks like this:
function createPDFDocument(docDefinition, callback) {
var fontDescriptors = {
Roboto: {
normal: './src/server/fonts/Roboto-Regular.ttf',
bold: './src/server/fonts/Roboto-Medium.ttf',
italics: './src/server/fonts/Roboto-Italic.ttf',
bolditalics: './src/server/fonts/Roboto-MediumItalic.ttf'
}
};
var printer = new Printer(fontDescriptors);
var pdfDoc = printer.createPdfKitDocument(docDefinition);
// buffer the output
var chunks = [];
pdfDoc.on('data', function(chunk) {
chunks.push(chunk);
});
pdfDoc.on('end', function() {
var result = Buffer.concat(chunks);
callback(result);
});
pdfDoc.on('error', callback);
// close the stream
pdfDoc.end();
}
In my angular application I am using the $resource service and have an endpoint defined like so:
this.resource = $resource('api/document-requests/',
null,
<any>{
'save': {
method: 'POST',
responseType: 'arraybuffer'
}
});
When I try this out, I don't get any browser download kicking in. [Screenshots of the Chrome response body and response headers omitted.]
So it seems I'm not a million miles off. I have searched around and found solutions mentioning converting to a Blob, but I think that's only relevant if I were serving back a Base64 encoded string of the document.
Can anyone suggest what may be my issue here?
Thanks
Here's a router:
router.get('/get-pdf-doc', async (req, res, next) => {
try {
var binaryResult = await createPdf();
res.contentType('application/pdf').send(binaryResult);
} catch (err) {
saveError(err);
res.send('<h2>There was an error displaying the PDF document.</h2>' +
'Error message: ' + err.message);
}
});
And here's a function to return the pdf.
const PdfPrinter = require('pdfmake');
const Promise = require("bluebird");
createPdf = async () => {
var fonts = {
Helvetica: {
normal: 'Helvetica',
bold: 'Helvetica-Bold',
italics: 'Helvetica-Oblique',
bolditalics: 'Helvetica-BoldOblique'
}
};
var printer = new PdfPrinter(fonts);
var docDefinition = {
content: [
'First paragraph',
'Another paragraph, this time a little bit longer to make sure,'+
' this line will be divided into at least two lines'
],
defaultStyle: {
font: 'Helvetica'
}
};
var pdfDoc = printer.createPdfKitDocument(docDefinition);
return new Promise((resolve, reject) =>{ try {
var chunks = [];
pdfDoc.on('data', chunk => chunks.push(chunk));
pdfDoc.on('end', () => resolve(Buffer.concat(chunks)));
pdfDoc.end();
} catch(err) {
reject(err);
}});
};
Everything seems fine to me; the only thing missing is the logic to trigger the download in the browser.
Check out this CodePen as an example.
There I'm using base64 encoded data, but you can just use binary data as well; just don't forget to change the href where I set scope.dataURL = base64.... A minimal version of that trigger is sketched below.
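For reference, here is a minimal sketch of that client-side trigger for a binary (arraybuffer) response; downloadPdf and the filename are names introduced here for illustration:
function downloadPdf(arrayBuffer, filename) {
  // Wrap the raw bytes in a Blob and point a temporary <a download> link at it.
  var blob = new Blob([arrayBuffer], { type: 'application/pdf' });
  var url = URL.createObjectURL(blob);
  var link = document.createElement('a');
  link.href = url;
  link.download = filename; // e.g. 'document.pdf'
  document.body.appendChild(link);
  link.click();
  document.body.removeChild(link);
  URL.revokeObjectURL(url); // release the object URL once the click has been dispatched
}
With the $resource setup from the question (responseType: 'arraybuffer'), you would call this from the save callback with the response data.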
I had issues serving PDF files from Node.js as well, so I made use of PhantomJS. You can check out this repository for the full codebase and implementation.
console.log('Loading web page')
const page = require('webpage').create()
const args = require('system').args
const url = 'www.google.com'
page.viewportSize = { width: 1024, height: 768 }
page.clipRect = { top: 0, left: 0 }
page.open(url, function(status) {
console.log('Page loaded')
setTimeout(function() {
page.render('docs/' + args[1] + '.pdf')
console.log('Page rendered')
phantom.exit()
}, 10000)
})

Nodejs S3 Delete Multiple Objects Error

I am trying to bulk delete my S3 objects that are associated with one specific blog record in my database, but I'm getting hung up on how to pass the array to my params object for the s3.deleteObjects method. I'm held up on this error: Check with error message InvalidParameterType: Expected params.Delete.Objects[0].Key to be a string. I feel like it could be related to not having a loop at some point in the process, or maybe the format of the values being passed to my s3Files array.
Here is my routing:
.delete(function(req, res){
models.File.findAll({
where: {
blogId: blog.blogId
}
}).then(function(file){
var s3Files = [];
function s3Key(link){
var parsedUrl = url.parse(link);
var fileName = parsedUrl.path.substring(1);
return fileName;
}
for(var k in file){
console.log('Here are each files ' + file[k].fileName);
s3Files.push(s3Key(file[k].fileName));
}
console.log('Here are the s3Files ' + s3Files);
//GOTTEN TO THIS POINT WITHOUT AN ERROR
aws.config.update({accessKeyId: process.env.AWS_ACCESS_KEY, secretAccessKey: process.env.AWS_SECRET_KEY, region: process.env.AWS_REGION});
//var awsKeyPath = s3Key(file.fileName);
var s3 = new aws.S3();
var options = {
Bucket: process.env.AWS_BUCKET,
Delete: {
Objects: [{
Key: s3Files
}],
},
};
s3.deleteObjects(options, function(err, data){
if(data){
console.log("File successfully deleted");
} else {
console.log("Check with error message " + err);
}
});
});
Here is the output from console.log('Here are each files ' + file[k].fileName);:
Here are each files https://local-bucket.s3.amazonaws.com/1/2017-02-12/screen_shot_2017-02-01_at_8_25_03_pm.png
Here are each files https://local-bucket.s3.amazonaws.com/1/2017-02-13/test.xlsx
Here are each files https://local-bucket.s3.amazonaws.com/1/2017-02-13/screen-shot-2017-02-08-at-8.23.37-pm.png
Here is the output from console.log('Here are the s3Files ' + s3Files);:
Here are the s3Files 1/2017-02-12/screen_shot_2017-02-01_at_8_25_03_pm.png,1/2017-02-13/test.xlsx,1/2017-02-13/screen-shot-2017-02-08-at-8.23.37-pm.png
Here is the error message:
Check with error message InvalidParameterType: Expected params.Delete.Objects[0].Key to be a string
Key should be a string. You should pass an array of objects as Objects.
Use this code:
var objects = [];
for(var k in file){
objects.push({Key : file[k].fileName});
}
var options = {
Bucket: process.env.AWS_BUCKET,
Delete: {
Objects: objects
}
};
Define your array as an array of objects, where each item has a Key property:
const objects = [
{Key: 'image1.jpg'},
{Key: 'image2.jpg'}
]
Then push each file as a new item onto the list:
for(var k in file){
objects.push({Key : file[k].fileName});
}
Set the array as the Objects value in the parameters:
const options = {
Bucket: process.env.BUCKET,
Delete: {
Objects: objects,
Quiet: false
}
};
Now delete the objects:
s3.deleteObjects(options, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else console.log(data); // successful response
});
Learn more in the official docs.

Downloading from S3 in node and opening in a new window

My Angular 1 application saves files to S3 and allows for a wide variety of files types.
When I retrieve the objects I use the following code:
export function show(req, res) {
const s3 = new aws.S3();
const s3Params = {
Bucket: S3_BUCKET,
Key: req.query.key + ''
};
res.attachment(req.query.key + '');
var fileStream = s3.getObject(s3Params).createReadStream();
fileStream.pipe(res);
}
I would like to open the received file on the client in a new window (just like in the AWS console), but I can't figure out how to go about it.
For example, the following on the client side does not work at all:
.then(
(data) => {
var file = new Blob([data], {type: 'application/pdf'});
var fileURL = URL.createObjectURL(file);
window.open(fileURL);
}
)
I really don't understand how the concept of data streams works.
If you don't have to download the PDF, you may open it directly from S3.
s3client.getResourceUrl("your-bucket", "some-path/some-key.jpg");
This will return the URL to the file.
So you need code like:
export function show(req, res) {
this.s3client = new aws.S3({
accessKeyId: options.accessKeyId,
secretAccessKey: options.secretAccessKey,
region: options.region
})
let resourceUrl = s3client.getResourceUrl(S3_BUCKET, req.query.key + '');
window.open(resourceUrl, '_blank');
}
I'm sorry, I can't test it right now, but give it a try. It should work.
All I had to do was get a signed URL for the resource for this to work, which is much simpler than what I was trying to do.
export function show(req, res) {
const s3 = new aws.S3();
const s3Params = {
Bucket: S3_BUCKET,
Key: req.query.key + ''
};
s3.getSignedUrl('getObject', s3Params, (err, data) => {
if (err) {
console.log(err);
return res.end();
}
const returnData = {
signedRequest: data,
};
res.write(JSON.stringify(returnData));
res.end();
});
}
and on the client all I have to do is open the link in a new tab:
openDoc(doc) {
this.$http()
.then(
(data) => {
this.$window.open(data.data.signedRequest, '_blank')
}
)
.catch(
(err) => {
this.Notification.error('failed to download attachment');
}
)
}
