How can I get Alexa to read the contents of a single-line web page?

I have an external webpage that contains only the following:-
{"date":"25 December 2017"}
Using Node.js, how can I get Alexa to read (and say) the date from the webpage?

You can use the "http" or "https" package in Node to do this. JSON.parse(responseString) can easily parse the content you have shown above.
Your external webpage URL replaces "yourendpoint" in the code below.
var http = require("http");
var self = this; // capture the intent handler so we can emit from inside the callbacks

http.get(yourendpoint, function (response) {
    // console.log("response: " + response);
    // Data is streamed in chunks from the server,
    // so we have to handle the "data" event.
    var buffer = "";

    response.on("data", function (chunk) {
        buffer += chunk;
    });

    response.on("end", function () {
        console.log("response: " + buffer);
        // Parse your response the way you want
        var speechOutput = "<<Your desired output>>";
        self.emit(':tell', speechOutput);
    });
}).on("error", function (err) {
    self.emit(':tell', "I am sorry, I could not get the data from the webpage.");
});
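For the payload shown in the question, a minimal sketch of the parse step inside the "end" handler (assuming the endpoint returns exactly {"date":"25 December 2017"}):

var parsed = JSON.parse(buffer);                 // { date: '25 December 2017' }
var speechOutput = "The date is " + parsed.date;
self.emit(':tell', speechOutput);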

Related

convert PDF data into octet-stream for printer autodetection

I have a printer that only accepts application/octet-stream over IPP. My program downloads binary PDF data and I need to convert that into application/octet-stream, which would (supposedly) let the printer decide what to print. However, when I send the data, it just prints binary data as text and not as formatted PDF. I'm using node with npm package 'ipp'.
I had a similar problem and found a working example that I modified a little to get it working (mixed in some of the pdfkit example, but shortened).
Here is my working version (Node v16.17.0 | npm 8.15.0 | Windows 11):
var ipp = require("ipp");
var concat = require("concat-stream");
var PDFDocument = require('pdfkit');

const doc = new PDFDocument();

// Render some text
doc
    .fontSize(25)
    .text('Some text with an embedded font!', 100, 100);

// Add an image, constrain it to a given size, and center it vertically and horizontally
doc.image('./my-image.png', {
    fit: [250, 300],
    align: 'center',
    valign: 'center'
});

// Pipe the document's output somewhere; here it goes into concat-stream,
// which hands us the finished PDF as a single Buffer
doc.pipe(concat(function (data) {
    // I used this URL with a Brother printer, because the 631 port had some weird problem
    var printer = ipp.Printer("http://<ip address>:80/ipp", { version: '2.0' });
    var file = {
        "operation-attributes-tag": {
            "requesting-user-name": "User",
            "job-name": "Print Job",
            "document-format": "application/octet-stream"
        },
        data: data
    };
    printer.execute("Print-Job", file, function (err, res) {
        // in case of error
        console.log("Error: ", err);
        console.log('res', res);
    });
}));

// This last line is very important!
doc.end();
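If the printer keeps printing raw text, it can help to first write the same document to a file and confirm it opens as a valid PDF; a minimal sketch (writing to disk in place of the concat-stream pipe above):

var fs = require("fs");
var PDFDocument = require('pdfkit');

var doc = new PDFDocument();
doc.pipe(fs.createWriteStream("./output.pdf")); // inspect this file before printing
doc.fontSize(25).text('Some text with an embedded font!', 100, 100);
doc.end();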
Note that you have to check whether your printer supports the IPP version you pass in.
I checked that with this code (I lost the link where I found it, so that is why there is no reference to it):
var ipp = require('ipp');
var uri = "http://<ip address>:80/ipp";

var data = ipp.serialize({
    "operation": "Get-Printer-Attributes",
    "operation-attributes-tag": {
        "attributes-charset": "utf-8",
        "attributes-natural-language": "en",
        "printer-uri": uri
    }
});

ipp.request(uri, data, function (err, res) {
    if (err) {
        return console.log(err);
    }
    console.log(JSON.stringify(res, null, 2));
});
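The supported versions show up in the printed response under the printer attributes; a minimal sketch of pulling them out ("ipp-versions-supported" is the standard IPP attribute name, and this assumes the ipp package keys the parsed response by tag group, as it does for the request above):

ipp.request(uri, data, function (err, res) {
    if (err) {
        return console.log(err);
    }
    // Standard IPP attribute listing the versions the printer accepts
    var attrs = res["printer-attributes-tag"];
    console.log("Supported IPP versions: ", attrs["ipp-versions-supported"]);
});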

Load images as a service

I have a service that pulls an object from an API. Some of that object may contain image URLs. The backend currently scans for these and processes them (in PHP) with file_get_contents(), translating them to inline data. This puts a heavy load on my server. The reason I am doing this is that I want to cache the images for offline use later, but in a way that I can still use regular Angular to render the object.
I can't do the processing in JavaScript in the browser with $http.get() because the site hosting the images blocks the cross-site request. What I thought to do, then, was to create an <IMG> element in the browser that calls the service back once it has loaded, so I can extract the data and process the object with it.
I can't control the service worker to store the GET from inside the app, and the URLs are not known by the app before it downloads the API object anyway.
I did think about redoing the service worker to store GETs from off my site as well, but that seemed a little bit wrong, and I'm not sure how well it would work anyway. Besides, while developing I switch off the service worker, as otherwise I have to let the entire site load twice for it to refresh completely.
Can anyone help me with a way to get image data via the browser into my service?
If I had found a CORS-supporting image host in the first place, I may not have needed this and could probably have just used the $http call.
A directive, a service, and a controller are required, as well as a host that supports CORS (Imgur, for example). I also used this base64 canvas code.
Here is the JavaScript code:
// Using this from https://stackoverflow.com/questions/934012/get-image-data-in-javascript
function getBase64Image(img) {
    // Create an empty canvas element
    var canvas = document.createElement("canvas");
    canvas.width = img.width;
    canvas.height = img.height;

    // Copy the image contents to the canvas
    var ctx = canvas.getContext("2d");
    ctx.drawImage(img, 0, 0);

    // Get the data-URL formatted image.
    // Firefox supports PNG and JPEG. You could check img.src to
    // guess the original format, but be aware that using "image/jpg"
    // will re-encode the image.
    var dataURL = canvas.toDataURL("image/png");
    return dataURL;
    // return dataURL.replace(/^data:image\/(png|jpg);base64,/, "");
}
// Used on the img tag to handle the DOM notification feeding into the service
app.directive('notifyimgsvc', ['imgSvc', function (imgSvc) {
    return {
        restrict: 'A',
        link: function (scope, element, attrs) {
            element.bind('load', function () {
                console.log('imgSvc::notify() image is loaded');
                console.log("imgSvc::notify(): " + this.src);
                imgSvc.notifyLoad(this.src, getBase64Image(this));
            });
            element.bind('error', function () {
                console.log('imgSvc::notify() image could not be loaded');
                console.log("imgSvc::notify(): " + this.src);
            });
        }
    };
}]);
// A core service to handle the comms in both directions from requests to data
app.service('imgSvc', [function () {
    var imgSvc = this;    // to avoid ambiguity in some inner function calls
    imgSvc.images = {};   // a cache of images
    imgSvc.requests = []; // the requests and their callbacks
    imgSvc.handlers = []; // handlers that will render images
    console.log("imgSvc::init()");

    // Allows a controller to be notified of a request for an image and
    // a callback to call when an image is added. There should only ever
    // be one of these, so an array is probably not needed and any further
    // registrations should probably throw an error.
    imgSvc.registerHandler = function (callback) {
        console.log("imgSvc::registerHandler()");
        if (imgSvc.requests.length) {
            // Already have image requests, so tell the new handler about them
            for (var i in imgSvc.requests) {
                callback(imgSvc.requests[i].url);
            }
        }
        // Add the new handler to the stack
        imgSvc.handlers.push(callback);
    };

    // The usage function from your code: provide a callback to get notified
    // of the data when it loads.
    imgSvc.getImg = function (url, callback) {
        console.log("imgSvc::getImg('" + url + "')");
        // If we have pre-cached it, send it back immediately.
        if (imgSvc.images[url] != undefined) {
            console.log("imgSvc::getImg('" + url + "'): Already have data for this one");
            callback(url, imgSvc.images[url]);
            return;
        }
        // Push an object into the request queue so we can process returned data later.
        // Doing it this way also means you can have multiple requests before any data
        // is returned, and they all get notified easily just by looping through the array.
        var obj = { "url": url, "callback": callback };
        if (imgSvc.handlers.length) {
            console.log("imgSvc::getImg('" + url + "'): informing handler");
            for (var i in imgSvc.handlers) {
                imgSvc.handlers[i](obj.url);
            }
        }
        imgSvc.requests.push(obj);
    };

    // Notification of a successful load (or a failure if src == null).
    imgSvc.notifyLoad = function (url, src) {
        console.log("imgSvc.notifyLoad()");
        // Save the data to the cache so any further calls can be handled
        // immediately without a request being created.
        imgSvc.images[url] = src;
        // Go through the requests list and call any callbacks that are registered.
        if (imgSvc.requests.length) {
            console.log("imgSvc.notifyLoadCallback('" + url + "'): scanning requests");
            for (var i = 0; i < imgSvc.requests.length; i++) {
                if (imgSvc.requests[i].url == url) {
                    console.log("imgSvc.notifyLoadCallback('" + url + "'): found request");
                    // Found the request, so remove it from the request list and call it
                    var req = imgSvc.requests.splice(i, 1)[0];
                    i = i - 1;
                    console.log("imgSvc.notifyLoadCallback('" + url + "')");
                    req.callback(url, src);
                } else {
                    console.log("imgSvc.notifyLoadCallback('" + url + "'): skipping request for '" + imgSvc.requests[i].url + "'");
                }
            }
        } else {
            console.log("imgSvc.notifyLoadCallback('" + url + "'): No requests present??");
        }
    };

    // The notify fail is just a logging wrapper around the failure.
    imgSvc.notifyFail = function (url) {
        console.log("imgSvc.notifyFail()");
        imgSvc.notifyLoad(url, null);
    };
}]);
// A simple controller to handle the browser loading of images.
// Could probably generate the HTML, but just doing it simply here.
app.controller('ImageSvcCtrl', ["$scope", "imgSvc", function ($scope, imgSvc) {
    $scope.images = [];
    console.log("imgSvcCtrl::init()");

    // Register this handler so that, as images are pushed to the service,
    // this controller can render them using regular Angular.
    imgSvc.registerHandler(function (url) {
        console.log("imgSvcCtrl::addUrlHandler('" + url + "')");
        // Only add it if we don't have it already. The caching in the
        // service will handle multiple requests for the same URL, and
        // all associated malarkey.
        if ($scope.images.indexOf(url) == -1) {
            $scope.images.push(url);
        }
    });
}]);
The HTML you need for this is very simple:
<div data-ng-controller="ImageSvcCtrl" style="display:none;">
    <img data-ng-repeat="img in images" data-ng-src="{{img}}" alt="loading image" crossorigin="anonymous" notifyimgsvc />
</div>
And you call it within your controllers like this:
var req_url = "https://i.imgur.com/lsRhmIp.jpg";
imgSvc.getImg(req_url, function (url, data) {
    if (data) {
        logger("MyCtrl.notify('" + url + "')");
    } else {
        logger("MyCtrl.notifyFailed('" + url + "')");
    }
});
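Note that any controller calling it this way needs imgSvc injected; a minimal sketch of such a calling controller (the controller name and what is done with the data here are illustrative, not part of the original answer):

app.controller('MyCtrl', ['$scope', 'imgSvc', function ($scope, imgSvc) {
    var req_url = "https://i.imgur.com/lsRhmIp.jpg";
    imgSvc.getImg(req_url, function (url, data) {
        if (data) {
            $scope.imageData = data; // base64 data URL returned by getBase64Image()
        } else {
            console.log("MyCtrl: failed to load '" + url + "'");
        }
    });
}]);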

AngularJS $.post code runs after the rest of the code

I have created the following function in AngularJS:
var enq_dt = new Date();

$.post("/api/EMSAPI/EnquiryDetails?enq_no=" + o_enq_no, null, function (returnedData) {
    enq_dt = returnedData["D_O_O"];
    console.log("Loading Post Block");
    console.log(enq_dt);
});

console.log("Loading General Block ");
console.log(enq_dt);

$scope.CurrentQuotation = {
    EnquiryNo: o_enq_no,
    EnquiryDate: enq_dt,
    QuotationBy: "TEST"
};
I am getting the following result in the console window:
Loading General Block
2010-11-26T00:00:00
Loading Post Block
2010-12-12T00:00:00
I want the Post block to run first and the General block to run after that.
What am I missing? (I am new to Angular.)
Thanks in advance.
I suggest you Google the word "asynchronous". In JavaScript, things like HTTP requests are almost always asynchronous.
To get your general code to run after the post, call it with .then():
function generalCode() {
    console.log("Loading General Block ");
    console.log(enq_dt);
    $scope.CurrentQuotation = {
        EnquiryNo: o_enq_no,
        EnquiryDate: enq_dt,
        QuotationBy: "TEST"
    };
}

var enq_dt = new Date();

$.post("/api/EMSAPI/EnquiryDetails?enq_no=" + o_enq_no, null)
    .then(function (returnedData) {
        enq_dt = returnedData["D_O_O"];
        console.log("Loading Post Block");
        console.log(enq_dt);
    })
    .then(generalCode);
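Since this is an AngularJS app, the same flow could also be written with Angular's own $http service (a minimal sketch, assuming $http is injected into your controller):

$http.post("/api/EMSAPI/EnquiryDetails?enq_no=" + o_enq_no, null)
    .then(function (response) {
        enq_dt = response.data["D_O_O"];
        console.log("Loading Post Block");
        console.log(enq_dt);
    })
    .then(generalCode);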

How to save WAV Blob to MongoDB, retrieve and serve correctly with Node?

I've found many posts dealing with saving binary files using the Mongoose Buffer SchemaType. However, most of them deal with image files, and I haven't been able to get them to work with a WAV audio file.
I'm using Recorder.js to save audio recordings from the built-in microphone. I use Recorder.js' exportWAV function to get a BLOB from the finished recording, then read the blob with FileReader and send it to the Node/Express backend where it is then saved to the DB. I've checked using the Mongo CLI and there is data being saved to the relevant field (starting with BinData(0,"UklGR.lotsofdatahere..="). When I try to get the recording by sentence id, the server responds with an appropriately-MIME-typed .wav file that is unplayable.
It seems that I'm missing something in the way that the files are encoded and decoded for storage in MongoDB. When reading the blob spit out by Recorder.js, it looks like it's already base64 encoded. So that's why I tried loading it as a base64 Buffer before saving to Mongo, and then decoding from a base64 buffer on output. What am I missing here? How can I fix these encoding issues? Thanks!
Note: I don't necessarily need GridFS because these files are well under 16MB. Although, if it's a lot faster to stream files from GridFS, maybe I should switch to that solution. However, I'd like to figure out what's wrong with this approach first.
Here's the relevant code from the Angular frontend:
$scope.start = function () {
    $scope.rec.record();
};

$scope.export = function () {
    $scope.rec.stop();
    $scope.rec.exportWAV(function blobCallback(blob) {
        $scope.rec.clear();
        var reader = new FileReader();
        reader.onload = function (event) {
            $.ajax({
                type: 'POST',
                url: '/saveRecording',
                data: {
                    audio: event.target.result,
                    text: $scope.text,
                    timestamp: new Date()
                }
            }).done(function (data) {
                console.log(data);
            });
        };
        reader.readAsDataURL(blob);
    });
};
The Express routes:
router.post('/saveRecording', function (request, response, next) {
    var sentence = new Sentence();
    sentence.audio = new Buffer(request.body.audio, 'base64');
    sentence.timestamp = request.body.timestamp;
    sentence.text = request.body.text;

    // Save sentence to DB with Mongoose
    sentence.save(function (error, sentence) {
        if (error) {
            return next(error);
        }
        // If no error, send the added sentence back to the client.
        response.json(sentence);
    });
});

router.get('/getRecording/:sentenceId', function (request, response, next) {
    Sentence.findById(request.params.sentenceId, function dbCallback(error, sentence) {
        if (error) {
            return next(error);
        }
        if (!sentence) {
            return next(new Error('Can\'t find sentence'));
        }
        var base64Audio = new Buffer(sentence.audio, 'base64');
        response.writeHead(200, {
            'Content-Type': 'audio/x-wav',
            'Content-Length': base64Audio.length
        });
        response.write(base64Audio);
        response.end();
    });
});
The Mongoose Schema for Sentences:
var SentenceSchema = new mongoose.Schema({
    text: String,
    audio: Buffer,
    timestamp: Date
});
You can try using GridFS for storing your audio files; see the MongoDB GridFS documentation.
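For reference, a minimal sketch of what serving a stored recording back via GridFS could look like, using the MongoDB driver's GridFSBucket (the bucket name and route shape here are illustrative, not part of the original answer; it assumes db is an open native-driver database handle and the file was previously uploaded with bucket.openUploadStream()):

var mongodb = require('mongodb');

router.get('/getRecording/:sentenceId', function (request, response, next) {
    var bucket = new mongodb.GridFSBucket(db, { bucketName: 'recordings' });
    response.set('Content-Type', 'audio/x-wav');
    bucket.openDownloadStream(new mongodb.ObjectId(request.params.sentenceId))
        .on('error', next)
        .pipe(response);
});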

Parse.com Data Storage Error with Facebook API

I am building a web application using Parse.com as my backend. I have run into a problem when trying to store the response from Facebook's user profile API in a Parse database.
FB.api('/me', function (response) {
    // Print Response to Console
    console.log(response);

    // Create New Parse Object
    var Facebook = Parse.Object.extend("Facebook");
    var userFacebook = new Facebook();
    for (var element in response) {
        userFacebook.set(element, response[element]);
    }

    // Store Data on Parse
    userFacebook.save(null, {
        success: function (user) {
            console.log("User Facebook data saved.");
        },
        error: function (user, error) {
            console.log("Facebook data storage error: " + error.code + " (" + error.message + ")");
        }
    });
});
The API response prints correctly to the console, but I receive two error messages, one after the other:
Failed to load resource: the server responded with a status of 404
(Not Found) https://api.parse.com/1/classes/Facebook/myID
and
Facebook data storage error: 101 (object not found for update)
In my Parse account, a database titled "Facebook" is created. However, it only contains a header for each of the elements in the response object (e.g., first_name, gender, email, etc.). It does not have a new row with these values.
I am really stuck on this one -- any help would be appreciated!
Note that the response object is in the form:
{
    "email": "email#example.com",
    "first_name": "myFirstName",
    "gender": "myGender",
    "id": "myID",
    "last_name": "myLastName",
    "link": "https://www.facebook.com/app_scoped_user_id/myID/",
    "locale": "en_US",
    "name": "myFullName",
    "timezone": -7,
    "updated_time": "2014-03-12T04:57:39+0000",
    "verified": true
}
The problem is with the object being set in the for-each loop:
for (var element in response) {
    userFacebook.set(element, response[element]);
}
The "id" column on a Parse.Object is protected and cannot be set via the API. In my case, Facebook's API response includes an "id" key, which is colliding with the Parse.Object "id" key.
If you are running into the same problem, you simply need to change the name of the "id" column. You can do this with the following code:
for (var element in response) {
    if (element == "id") {
        userFacebook.set("userID", response[element]);
    } else {
        userFacebook.set(element, response[element]);
    }
}
Thanks to Hector Ramos at Parse for helping me solve the problem! And thanks to Wayne for getting me part of the way!
