Apologies for how trivial this may be, and for the formatting. Link to the package's GitHub: https://github.com/CollectionFS
I'm following the Store a File From the Server example.
In lib/collections.js I have:
// CollectionFS collection backed by the FileSystem store; files are
// written under ~/uploads on the server.
// NOTE(review): the store name "imagest" looks like a typo for
// "images" — confirm it matches the store name used elsewhere.
Images = new FS.Collection("images", {
stores: [new FS.Store.FileSystem("imagest", {path: "~/uploads"})]
});
In server/test.js:
// Build a Buffer holding the bytes of the message.
// The original used the deprecated Buffer(length) constructor plus a
// manual charCodeAt() loop, which is only safe for code points < 256;
// Buffer.from handles the encoding correctly in one step.
var myText = 'Hello world, I wrote this..:)';
var buffer = Buffer.from(myText, 'utf-8');
// Store the buffer in the Images collection as serverFile.txt.
// NOTE(review): storeBuffer is only documented on the devel branch of
// CollectionFS — confirm the installed version actually exposes it;
// this call is where the reported error is thrown.
Images.storeBuffer('serverFile.txt', buffer, {
// Set a contentType (optional)
contentType: 'text/plain',
noProgress: false,
// Attach custom data to the file
metadata: { text: 'some stuff' },
// Set encoding (optional default 'utf-8')
encoding: 'utf-8'
});
The error is thrown at the storeBuffer call. I've tried a few variations of saving a file from a server but I would really like to use this package.
Thanks.
This source states that the method storeBuffer should be available or FS.File.setDataFromBuffer in the devel branch, but both don't work for me.
But I found a solution by reading the source-code of FS.File:
# Workaround found in the FS.File source: build the file object
# manually, attach the buffer, and insert only once the data has been
# attached (attachData is asynchronous, hence the callback).
file = new FS.File()
file.attachData buffer, {type: "text/plain"}, ->
Images.insert file
Related
I'm trying to download an excel file with the click of a button in my web application. I can see the data come across from my api request, but when I download the file and try to open it I either get a:
"We found a problem with some content in ... Do you want us to try to recover as much as possible" YES => "This file is corrupt and cannot be opened"
or
"... the file format or file extension is not valid. Verify that theh file has not been corrupted..."
If I open the original file saved it works fine so it's not the file. I think the problem is somewhere in the React Code.
I've looked at a lot of other questions on Stack Overflow about this same topic but none of the answers seem to be working for me.
React
React.useEffect(() => {
  if (template && downloadBtn.current != null) {
    axios
      .get<Blob>(`/template`, {
        params: { filename: template },
        // Binary responses must not be decoded as text: without
        // responseType 'blob' (or 'arraybuffer'), axios UTF-8-decodes
        // the body and the xlsx bytes are corrupted — this is the
        // cause of the "file is corrupt" errors on open.
        responseType: 'blob',
      })
      .then((resp) => {
        // resp.data is already a Blob; re-wrap only to pin the MIME
        // type reported by the server.
        const blob = new Blob([resp.data], {
          type: resp.headers['content-type'],
        });
        const url = window.URL.createObjectURL(blob);
        if (downloadBtn.current != null) {
          downloadBtn.current.download = template;
          downloadBtn.current.href = url;
        }
      });
  }
  // NOTE(review): downloadBtn.current in the dependency array is
  // ineffective — mutating a ref does not trigger a re-render.
}, [template, downloadBtn.current]);
Flask
# Serve a template file by name from the configured templates folder.
# NOTE: the snippet originally began with "#app.route(...)" — the
# decorator must be "@app.route" or the route is never registered.
@app.route('/template', methods=['GET'])
def template():
    filename = getRouteData(['filename'])  # helper function I wrote to get request.body data
    # Drop any directory components so a crafted filename such as
    # "../../etc/passwd" cannot escape the templates folder.
    safe_name = os.path.basename(filename)
    path = os.path.join(app.config['templates_folder'], safe_name)
    print(path, file=sys.stderr)
    # as_attachment=True merely adds Content-Disposition; the reported
    # client-side corruption is caused by decoding the binary response
    # as text (axios responseType), not by this handler.
    return send_file(path)
I have a printer that only accepts application/octet-stream over IPP. My program downloads binary PDF data and I need to convert that into application/octet-stream, which would (supposedly) let the printer decide what to print. However, when I send the data, it just prints binary data as text and not as formatted PDF. I'm using node with npm package 'ipp'.
I had a problem similar to that, in this link! I found a working example that, I modified a little like this to work (mixed some with the pdfkit! example, but shorted).
Here is my working version (node v16.17.0 | npm 8.15.0 | windows 11)
var ipp = require("ipp");
var concat = require("concat-stream");
var PDFDocument = require('pdfkit');
const doc = new PDFDocument();
// Pipe its output somewhere, like to a file or HTTP response
// Render some text
doc
.fontSize(25)
.text('Some text with an embedded font!', 100, 100);
// Add an image, constrain it to a given size, and center it vertically and horizontally
doc.image('./my-image.png', {
fit: [250, 300],
align: 'center',
valign: 'center'
});
doc.pipe(concat(function (data) {
//I used this url with a Brother printer, because the 631 port had some weird problem
var printer = ipp.Printer("http://<ip address>:80/ipp",{version:'2.0'});
var file = {
"operation-attributes-tag":{
"requesting-user-name": "User",
"job-name": "Print Job",
"document-format": "application/octet-stream"
},
data: data
};
printer.execute("Print-Job", file, function (err, res) {
//in case of error
console.log("Error: ",err);
console.log('res',res);
});
}));
//This last line is very important!
doc.end();
Note that you have to check whether your printer supports the IPP version you specify.
I checked that with this code (I lost the link where I found it, so that is why there is no reference to it):
// Query the printer for its attributes to discover which IPP versions
// and document formats it supports.
const ipp = require('ipp');

const uri = "http://<ip address>:80/ipp";

// Serialized Get-Printer-Attributes request payload.
const payload = ipp.serialize({
  "operation": "Get-Printer-Attributes",
  "operation-attributes-tag": {
    "attributes-charset": "utf-8",
    "attributes-natural-language": "en",
    "printer-uri": uri
  }
});

ipp.request(uri, payload, (err, res) => {
  if (err) {
    console.log(err);
    return;
  }
  console.log(JSON.stringify(res, null, 2));
});
Hey guys I am using NGX Dropzone and I notice when I drag an image into the viewer it is in base64, but when I try to read the console.log(event.addedFiles); I have no information being passed to me with the base64 value. Here's an example of what I get back
[File]
0: File
lastModified: 1625149167659
lastModifiedDate: Thu Jul 01 2021 10:19:27 GMT-0400 (Eastern Daylight Time) {}
name: "210534431_764639924207804_238792847075232344_n.jpeg"
size: 101133
type: "image/jpeg"
webkitRelativePath: ""
__proto__: File
length: 1
__proto__: Array(0)
I have another piece of code which I have been using that transforms a URL into a base64 string. But that's useless to me since the URL can also be shared and opened by anyone from anywhere. However, my local image on my computer is only available to me, unless I transform it into base64, which is a string I can save in a database.
This is the script
// Holds the base64 data-URL string bound to the <img [src]> in the
// template once the blob has been read.
imageToShow: any;
// Fetch the image behind this.thumb.name as a Blob and hand it to
// createImageFromBlob for base64 conversion.
onURLinserted() {
this.getImage(this.thumb.name).subscribe(data => {
this.createImageFromBlob(data);
}, error => {
console.log("Error occured",error);
});
console.log("Data: ", this.thumb.name);
}
// GET the image URL with a binary (blob) response. The
// `'blob' as 'json'` cast works around HttpClient's typed overload
// signatures while still requesting a Blob body at runtime.
getImage(imageUrl: string): Observable<Blob> {
return this.http
.get<Blob>(imageUrl, { observe: 'body', responseType: 'blob' as 'json' })
}
// Read a Blob with FileReader and publish the resulting base64 data
// URL to the fields the template binds to.
createImageFromBlob(image: Blob) {
let reader = new FileReader(); //you need file reader for read blob data to base64 image data.
reader.addEventListener("load", () => {
this.imageToShow = reader.result; // here is the result you got from reader which I use to view the image
this.selectedRowData.photo = reader.result; // this is my ngModel read by my HTML input fields
}, false);
if (image) {
reader.readAsDataURL(image);
}
}
//In my HTML code
<img [src]="imageToShow" alt="">
All I am really trying to do is extract the base64 information from the image dragged in there into imageToShow either by using this code if it helps or something similar OR maybe the cdk drag an drop already has a prop that I dont know about
How do I know that the base64 is even available? When I drag an image in it, and I inspect it in the dev tool I can see the src="data:image/jpeg;base64,random stuff..."
Wish I could put some test code here but I will need the dropzone library for it
Looks like ngx-dropzone does not have a prop that provides a base64 string.
You can use readAsDataURL to get a base64 string. readAsDataURL reads the contents of the Blob or File; when the loadend event is triggered, the result attribute contains the data as a data: URL representing the file's contents as a base64-encoded string.
The below code worked for me.
html file
<div class="custom-dropzone" ngx-dropzone [accept]="'image/jpeg,image/jpg,image/png,image/gif'"
(change)="onSelect($event)">
<ngx-dropzone-label>
<div>
<h2>Upload photo</h2>
</div>
</ngx-dropzone-label>
<ngx-dropzone-image-preview ngProjectAs="ngx-dropzone-preview" *ngFor="let f of files" [file]="f"
[removable]="true" (removed)="onRemove(f)">
</ngx-dropzone-image-preview>
</div>
.ts file
onSelect(event) {
  // Append the newly dropped files to the running list.
  this.files.push(...event.addedFiles);
  // Convert only the files added by THIS event. The original looped
  // over this.files, which re-encodes (and duplicates) files from
  // earlier drops every time a new file is selected.
  for (const file of event.addedFiles) {
    this.fileToBase64(file)
      .then(result => {
        // Strip the "data:<mime>;base64," data-URL prefix so only the
        // raw base64 payload is stored.
        const base64String = result.replace('data:', '')
          .replace(/^.+,/, ''); // To remove data url part
        this.postMultimedias.push({ name: file.name, content:
          base64String });//postMultimedias is a array which holds image name and bas64String
      });
  }
}
// Read a File and resolve with its full data-URL string
// ("data:<mime>;base64,<payload>"); rejects on reader error.
fileToBase64 = (file: File): Promise<string> => {
  return new Promise<string>((resolve, reject) => {
    const fileReader = new FileReader();
    fileReader.onload = () => resolve(fileReader.result.toString());
    fileReader.onerror = error => reject(error);
    fileReader.readAsDataURL(file);
  });
}
// Remove a file (and its matching base64 record) when its preview's
// remove button is clicked; both arrays stay index-aligned.
onRemove(event) {
  const idx = this.files.indexOf(event);
  this.postMultimedias.splice(idx, 1);
  this.files.splice(idx, 1);
}
I've found many posts dealing with saving binary files using the Mongoose Buffer SchemaType. However, most of them deal with image files, and I haven't been able to get them to work with a WAV audio file.
I'm using Recorder.js to save audio recordings from the built-in microphone. I use Recorder.js' exportWAV function to get a BLOB from the finished recording, then read the blob with FileReader and send it to the Node/Express backend where it is then saved to the DB. I've checked using the Mongo CLI and there is data being saved to the relevant field (starting with BinData(0,"UklGR.lotsofdatahere..="). When I try to get the recording by sentence id, the server responds with an appropriately-MIME-typed .wav file that is unplayable.
It seems that I'm missing something in the way that the files are encoded and decoded for storage in MongoDB. When reading the blob spit out by Recorder.js, it looks like it's already base64 encoded. So that's why I tried loading it as a base64 Buffer before saving to Mongo, and then decoding from a base64 buffer on output. What am I missing here? How can I fix these encoding issues? Thanks!
Note: I don't necessarily need GridFS because these files are well under 16MB. Although, if it's a lot faster to stream files from GridFS, maybe I should switch to that solution. However, I'd like to figure out what's wrong with this approach first.
Here's the relevant code from the Angular frontend:
// Start capturing audio with Recorder.js.
$scope.start = function() {
$scope.rec.record();
}
// Stop recording, export the capture as a WAV blob, and POST it to the
// server as a base64 data URL.
$scope.export = function() {
$scope.rec.stop();
$scope.rec.exportWAV(function blobCallback(blob) {
$scope.rec.clear();
var reader = new FileReader();
reader.onload = function(event) {
// NOTE(review): readAsDataURL yields a data: URL, i.e. the string
// starts with "data:audio/wav;base64," — the server must strip that
// prefix before base64-decoding, or the stored WAV is corrupted.
$.ajax({
type: 'POST',
url: '/saveRecording',
data: {
audio: event.target.result,
text: $scope.text,
timestamp: new Date()
}
}).done(function(data) {
console.log(data);
});
}
reader.readAsDataURL(blob);
});
}
The Express routes:
// Persist a recording POSTed as a base64 data URL.
router.post('/saveRecording', function(request, response, next) {
  var sentence = new Sentence();
  // request.body.audio comes from FileReader.readAsDataURL, so it is a
  // data URL: "data:audio/wav;base64,<payload>". Decoding the WHOLE
  // string as base64 (as the original did) corrupts the WAV — strip
  // the prefix first, then decode only the payload.
  var base64Payload = request.body.audio.replace(/^data:[^;]+;base64,/, '');
  // Buffer.from replaces the deprecated new Buffer(...) constructor.
  sentence.audio = Buffer.from(base64Payload, 'base64');
  sentence.timestamp = request.body.timestamp;
  sentence.text = request.body.text;
  // Save sentence to DB with Mongoose
  sentence.save(function(error, sentence) {
    if (error) {
      return next(error);
    }
    // If no error, send added sentence back to the client.
    response.json(sentence);
  });
});
// Stream a stored recording back as audio/x-wav.
router.get('/getRecording/:sentenceId', function(request, response, next) {
  Sentence.findById(request.params.sentenceId,
    function dbCallback(error, sentence) {
      if (error) {
        return next(error);
      }
      if (!sentence) {
        return next(new Error('Can\'t find sentence'));
      }
      // sentence.audio already holds the raw bytes (a Mongoose Buffer
      // field) — re-decoding it "from base64" as the original did is
      // at best a redundant copy and masks the real encoding bug on
      // the write path. Serve the bytes as-is.
      var audio = sentence.audio;
      response.writeHead(200, {
        'Content-Type': 'audio/x-wav',
        'Content-Length': audio.length
      });
      response.write(audio);
      response.end();
    });
});
The Mongoose Schema for Sentences:
// Schema for a recorded sentence: the prompt text, the raw WAV bytes
// (Buffer — stored as BinData in MongoDB), and when it was recorded.
var SentenceSchema = new mongoose.Schema({
text: String,
audio: Buffer,
timestamp: Date
});
You can try using GridFs for storing your audio files
check that link
I'm building some models to interact with an existing API from a previous project.
The API relies on standard POST methods to save the data.
I've configured a model and proxy up to the point where it does push the data onto the server but there only seems to be two writer types, json & xml.
// Proxy configuration: responses are parsed as JSON from the
// "results" root; the writer type for plain POST fields is the open
// question (only json and xml writers ship with the framework).
proxy: {
/* ... */
reader: {
type: 'json',
root: 'results'
},
writer: {
type: '???' // <-- can only see json or xml in the docs
}
}
Isn't there a standard POST writer that simply submits data in post fields?
I'm surprised that wouldn't be a standard writer type.
(Parsing the json format wouldn't be too hard to implement but that would mean updating a lot of the old api files.)
Ok, I was able to create that writer quite easily by checking the existing writers' source code.
One thing those existing writers are able to do - and that may be why the dev team only implemented a json and xml version - is that they can push multiple records at once.
That could be implemented in POST but would be a bit more complicated.
This writer will work if you're trying to push a single model to an api using POST:
// Custom writer that submits a single record's fields as plain POST
// parameters instead of a JSON/XML request body.
Ext.define('Ext.data.writer.SinglePost', {
  extend: 'Ext.data.writer.Writer',
  alternateClassName: 'Ext.data.SinglePostWriter',
  alias: 'writer.singlepost',
  // data is an array of record-data objects; only the first record is
  // sent. Merge into any params already on the request (e.g. the
  // proxy's extraParams) instead of clobbering them — this matches the
  // Sencha Touch variant of this writer, which merges via Ext.apply.
  writeRecords: function(request, data) {
    request.params = Ext.apply(request.params || {}, data[0]);
    return request;
  }
});
and the use this for the writer in the proxy:
// Register the custom writer on the proxy by its alias.
writer: {
type: 'singlepost'
}
Based on Ben's answer, I've implemented my own writer that collects all properties of all models into arrays.
For example if you have model like with some fields:
// Model field definitions. NOTE: the original snippet was missing the
// commas between the array elements, which is a syntax error.
fields: [
    {name: 'id', type: 'int'},
    {name: 'name', type: 'string'},
    {name: 'age', type: 'date'}
]
A request string will be
id=1&id=2&id=...&name=oleks&name=max&name=...&age=...
Code:
// Writer that flattens every record's fields into parallel arrays of
// POST parameters, producing e.g. id=1&id=2&name=a&name=b&...
Ext.define('Ext.data.writer.SinglePost', {
  extend: 'Ext.data.writer.Writer',
  alternateClassName: 'Ext.data.SinglePostWriter',
  alias: 'writer.singlepost',
  writeRecords: function(request, data) {
    if (data && data[0]) {
      // Field names are taken from the first record; all records are
      // assumed to share the same shape.
      var fieldNames = [];
      var name;
      for (name in data[0]) {
        fieldNames.push(name);
      }
      var i, j, values;
      for (i = 0; i < fieldNames.length; i++) {
        // One array of values per field, ordered by record.
        values = [];
        for (j = 0; j < data.length; j++) {
          values.push(data[j][fieldNames[i]]);
        }
        request.params[fieldNames[i]] = values;
      }
    }
    return request;
  }
});
For Sencha touch 2.0, change the writeRecords method to:
// Sencha Touch 2 variant: request params are accessed through
// getters/setters, and existing params are merged (Ext.apply) rather
// than replaced.
writeRecords: function (request, data) {
var params = request.getParams() || {};
Ext.apply(params, data[0]);
request.setParams(params);
return request;
}
Here's my version, adapted from answers above:
// Writer that serializes records to XML via the stock XmlWriter, then
// sends that XML as a single POST parameter named 'XML' instead of as
// the request body.
// Subclass the original XmlWriter
Ext.define('MyApp.utils.data.writer.XmlInAPostParameter', {
extend : 'Ext.data.writer.Xml',
// give it an alias to use in writer 'type' property
alias : 'writer.xml_in_a_post_parameter',
// override the original method
writeRecords : function(request, data) {
// call the overriden method - it will put the data that I
// want into request.xmlData
this.callParent(arguments);
// copy the data in request.xmlData. In this case the XML
// data will always be in the parameter called 'XML'
Ext.apply(request.params, {
XML: request.xmlData
});
// Already copied the request payload and will not send it,
// so we delete it from the request
delete request.xmlData;
// return the modified request object
return request;
}
});
Ext.define("MyApp.model.MyModel", {
extend : "Ext.data.Model",
requires : [
'MyApp.utils.data.writer.XmlInAPostParameter'
],
fields : [ 'field_A', 'field_B' ],
proxy : {
type : 'ajax',
api : {
read : '/mymodel/read.whatever',
update : '/mymodel/write.whatever'
},
reader : {
type : 'xml'
},
writer : {
// use the alias we registered before
type : 'xml_in_a_post_parameter'
}
}
});