I have a component in AngularJS (porting soon to Angular 7) for updating a user profile; it invokes an AngularJS service method that performs a PUT to /api/user/:id.
I want to add a small photo (<100K) and send it in the same PUT along with the other fields, then handle the request like this in the controller...
// route: PUT /api/user/:id
import db from '../../utils/db';
export async function upsert(req, res) {
const { id, name, phone, email, photo } = req.body;
// users.photo in PostgreSQL has datatype of bytea
const sql = `UPDATE users SET name = $2, phone = $3, email = $4, photo = $5 WHERE id = $1 RETURNING id, name, phone, email, photo;`;
const { rows } = await db.query(sql, [id, name, phone, email, photo]);
return res.status(200).send(rows);
}
Is there a clean way to encode the image client-side so it can be included in the JSON the AngularJS service PUTs? Other solutions I found seem like overkill for this use-case - requiring the image upload to be handled very differently from the other fields.
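For anyone who wants to keep everything in a single JSON PUT, here is a minimal sketch of base64-encoding the file client-side with FileReader (the helper name is made up; the server would then decode the string before the bytea insert):
// Sketch only: read the selected file as a data URL and strip the
// "data:image/...;base64," prefix, leaving a base64 string that can be
// sent as an ordinary JSON field. readFileAsBase64 is a hypothetical helper.
function readFileAsBase64(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result.split(',')[1]);
    reader.onerror = reject;
    reader.readAsDataURL(file);
  });
}
// Usage: const photo = await readFileAsBase64(selectedFile);
// Server side, Buffer.from(photo, 'base64') can be passed to the bytea parameter.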
Ended up biting the bullet - creating a separate table for files along with its own API using formidable and node-postgres. In case this helps anyone else, here's how it turned out.
PostgreSQL data definition...
-- DROP SEQUENCE public.files_seq;
CREATE SEQUENCE IF NOT EXISTS public.files_seq;
-- DROP TABLE public.files;
CREATE TABLE IF NOT EXISTS public.files (
_id integer PRIMARY KEY DEFAULT nextval('files_seq'::regclass),
name character varying(512) NOT NULL,
type character varying(20) NOT NULL,
data bytea
);
-- DROP INDEX public.files_name;
CREATE INDEX files_name ON public.files USING btree (name);
Controller for Express...
import stream from 'stream';
import fs from 'fs';
import { IncomingForm } from 'formidable';
import db from '../../utils/db';
// Returns list of images
export async function index(req, res) {
const { rows } = await db.query('SELECT _id, name, type FROM files ORDER BY name;', []);
return res.send(rows);
}
// Uploads a single file
export async function upload(req, res) {
let _id;
new IncomingForm().parse(req, (err, fields, files) => {
if(err) throw err;
if(Array.isArray(files.file)) throw new Error('Only one file can be uploaded at a time');
const { name, type, path } = files.file;
fs.readFile(path, 'hex', async(err, fileData) => {
if(err) throw err;
fileData = `\\x${fileData}`;
const sql = 'INSERT INTO files (name, type, data) VALUES($1, $2, $3) RETURNING _id;';
const { rows } = await db.query(sql, [name, type, fileData]);
_id = rows[0]._id;
res.send({ id: _id });
// console.log(`Uploaded ${name} to ${path} and inserted into database (ID = ${_id})`);
// No need to delete the file uploaded as Heroku has an ephemeral file system
});
});
}
// Downloads a file by its _id
export async function download(req, res) {
const _id = req.params.id;
const sql = 'SELECT _id, name, type, data FROM files WHERE _id = $1;';
const { rows } = await db.query(sql, [_id]);
const file = rows[0];
const fileContents = Buffer.from(file.data, 'base64');
const readStream = new stream.PassThrough();
readStream.end(fileContents);
res.set('Content-disposition', `attachment; filename=${file.name}`);
res.set('Content-Type', file.type);
readStream.pipe(res);
return rows[0];
}
// Deletes a file from the database (admin-only)
export async function destroy(req, res) {
const _id = req.params.id;
const sql = 'DELETE FROM files WHERE _id = $1;';
await db.query(sql, [_id]);
res.status(204).end(); // 204 No Content responses carry no body
}
On the client side, I'm using ng-file-upload with AngularJS.
Here's the relevant part of the view (in pug for brevity)...
.form-group.col-md-6
label(for='photo') Teacher photo
input.form-control(ngf-select='$ctrl.uploadPhoto($file)', type='file', id='photo', name='photo', ng-model='$ctrl.user.photo', ngf-pattern="'image/*'", ngf-accept="'image/*'", ngf-max-size='100KB', ngf-min-height='276', ngf-max-height='276', ngf-min-width='236', ngf-max-width='236', ngf-resize='{width: 236, height: 276}', ngf-model-invalid='errorFile')
ng-messages.help-block.has-error(for='form.photo.$error', ng-show='form.photo.$dirty || form.$submitted', role='alert')
ng-message(when='maxSize') Please select a photo that is less than 100K.
ng-message(when='minHeight,maxHeight,minWidth,maxWidth') The image must be 236 x 276 pixels.
span(ng-if='$ctrl.user.imageId')
img(ng-src='/api/file/{{ $ctrl.user.imageId }}' alt="Photo of Teacher")
and method in its controller...
uploadPhoto(file) {
if(file) {
this.uploadService.upload({
url: '/api/file/upload',
data: { file }
})
.then(response => {
this.user.imageId = response.data.id;
}, response => {
if(response.status > 0) console.log(`${response.status}: ${response.data}`);
}, evt => {
// Math.min is to fix IE which reports 200% sometimes
this.uploadProgress = Math.min(100, parseInt(100.0 * evt.loaded / evt.total, 10));
});
}
}
Here is my front-end code:
const api = axios.create({
baseURL: Backend_url+`Mode_ticketing_react/Ticketing/`
});
const Ticketing_rest = {
async post_nouveau_ticketing(
nouveau_ticket_fichier_joint,
email,
objet,
description,
categorie,
id_user
) {
try {
const result = await api.post(
"set_nouveau_ticketing/", {
email : email,
objet : objet,
description : description,
categorie : categorie,
id_user : id_user,
nouveau_ticket_fichier_joint : nouveau_ticket_fichier_joint
}
);
console.log("resultat envoye fichier ------", result)
return result.data;
}catch (error) {
console.log("erreur survenue" + error)
}
},
and here is how I call this service from the view:
function handleClick_soumettre_nouveau_ticket(e){
e.preventDefault();
setSpinner_button(true);
Ticketing_rest.post_nouveau_ticketing( champ_file_nouveau,
champ_email,
champ_objet,
champ_description,
champ_categorie,
flexi_auth.user_id
).then((result)=>{
//console.log(result)
});
}
All parameters are taken from input fields, and my problem is that ALL DATA is submitted except for the "champ_file_nouveau" field, which holds the file data.
function handleChange_champ_file_nouveau(e){
e.preventDefault();
setChamp_file_nouveau(e.target.files[0]);
console.log("selected file ------",e.target.files[0])
}
My backend is written in CodeIgniter, and when I test it from Postman it works, so I think the problem is not in the backend but in the FRONT END!
Any suggestions please.
I already tried with the FormData() class but it did not work!
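For comparison, here is how a file is usually sent with axios as multipart/form-data rather than JSON. The field names below are copied from the snippet above, but the exact names the CodeIgniter endpoint expects are an assumption:
// Sketch only: append the File object and the other fields to a FormData
// instance and let the browser set the multipart boundary automatically.
const formData = new FormData();
formData.append("nouveau_ticket_fichier_joint", nouveau_ticket_fichier_joint); // the File object
formData.append("email", email);
formData.append("objet", objet);
formData.append("description", description);
formData.append("categorie", categorie);
formData.append("id_user", id_user);
const result = await api.post("set_nouveau_ticketing/", formData);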
I'm quite new to JavaScript; normally I'm a Python person. I've looked at some other answers, but my embed does not add the fields as expected. The embed itself is sent.
My Discord bot follows the guide provided by the devs (primary file, slash commands, command files). I am trying to loop through the entries in an SQLite query and add them as fields.
My command file is below.
const { SlashCommandBuilder } = require('@discordjs/builders');
const { MessageEmbed } = require('discord.js')
const sqlite = require('sqlite3').verbose();
module.exports = {
data: new SlashCommandBuilder()
.setName('rank')
.setDescription('Rank all points.'),
async execute(interaction) {
const rankEmbed = new MessageEmbed()
.setColor('#0099ff')
.setTitle('Rank Board')
let db = new sqlite.Database('./databases/ranktest.db', sqlite.OPEN_READWRITE);
let queryall = 'SELECT name, points FROM pointstable ORDER BY points DESC'
db.all(queryall, [], (err, rows) => {
if (err) {
console.log('There was an error');
} else {
rows.forEach((row) => {
console.log(row.name, row.points)
rankEmbed.addField('\u200b', `${row.name}: ${row.points}`, true);
});
}
})
return interaction.reply({embeds: [ rankEmbed ] });
}
}
I would also like to convert row.name - held as Discord IDs - to usernames, i.e. MYNAME#0001. How do I do this from the interaction? I was able to obtain the user ID in another command by using interaction.member.id, but in this case I need to grab the users from the guild. In Python I did this with await client.fetch_user, but here the error "await is only valid in async functions and the top level bodies of modules" is thrown.
Thanks.
OK, I've solved the first aspect: I had the return interaction.reply in the wrong place.
Relevant snippet:
rows.forEach((row) => {
console.log(row.name, row.points)
rankEmbed.addField('\u200b', `${row.name}: ${row.points}`, false);
})
return interaction.reply({embeds: [rankEmbed ]} );
I would still appreciate an answer on converting row.name (a user ID) to a username via fetch.
I've solved the second aspect also. Add the below into the loop.
rows.forEach((row) => {
let client = interaction.client
const uname = client.users.cache.get(row.name);
rankEmbed.addField('\u200b', `${uname}: ${row.points}`, false);
});
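Note that client.users.cache.get(...) only finds users that are already in the client's cache. To cover uncached users, client.users.fetch(id) (which returns a Promise) can be used instead; here is a rough sketch of the loop rewritten that way, assuming the callback is made async and the reply moves inside it:
// Sketch: make the db.all callback async so await is allowed,
// fall back to fetching the user when it isn't cached,
// and reply once all fields have been added.
const client = interaction.client;
db.all(queryall, [], async (err, rows) => {
  if (err) return console.log('There was an error');
  for (const row of rows) {
    const user = client.users.cache.get(row.name) ?? await client.users.fetch(row.name);
    rankEmbed.addField('\u200b', `${user.tag}: ${row.points}`, false); // user.tag -> NAME#0001
  }
  return interaction.reply({ embeds: [rankEmbed] });
});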
So I'm getting some information from a form.
I want to take a value from the form, e.g. a name, and use it as the name of an array in a schema I am creating.
Here is what I'd like to do:
app.post('/edit-rooms', (req, res, next) =>{
const Accomodation = require('../models/accomodation')
Accomodation.findOne({email: req.session.passport.user}).lean().exec((err, user) => {
if (err) {
console.log(err, null);
}
if (user) {
const mongoose = require('mongoose');
const Schema = mongoose.Schema;
const AccomSchema = new Schema ({req.body.roomsleft : Array})
const Accomodation = mongoose.model('accomodation', AccomSchema);
Accomodation.update({
roomsleft: req.body.roomsleft,
{req.body.roomsleft}: {
roomnum: req.body.roomnumber,
single: req.body.single,
double: req.body.double,
king: req.body.king,
superking: req.body.superking,
bunk: req.body.bunk,
sofa: req.body.sofa,
button: req.body.button,
},
upsert: true
})
.exec()
}
})
})
So as you can see, I am getting a name from the body and trying to use it as the name of the array.
But obviously that doesn't work.
Does anyone know how I am supposed to do that correctly?
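For reference, plain JavaScript supports computed property names ([expression]: value), which let an object key come from a variable. A rough sketch of the update object built that way (whether a dynamically named field plays well with a strict Mongoose schema is a separate question):
// Sketch: build the update object with a computed property name,
// so the key comes from the value posted in req.body.roomsleft.
const fieldName = req.body.roomsleft;
const update = {
  [fieldName]: {
    roomnum: req.body.roomnumber,
    single: req.body.single,
    double: req.body.double,
    king: req.body.king,
    superking: req.body.superking,
    bunk: req.body.bunk,
    sofa: req.body.sofa,
    button: req.body.button,
  },
};
// e.g. Accomodation.updateOne({ email: req.session.passport.user }, { $set: update }, { upsert: true }).exec();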
I have a complex object parameter that I need to send as a POST, as it could be too long for the query string. The POST call asks the server to dynamically generate an Excel file, which is then downloaded asynchronously. All of this is happening inside a React application. How does one do this using axios.post, React, and Web API? I have confirmed that the file is generated and that the response comes back, but I'm not sure how to actually open the file. I have a hidden iframe whose src I'm trying to point at the file, but I don't know which response property to use.
// webapi
[HttpPost]
public HttpResponseMessage Post([FromBody]ExcelExportModel pModel)
{
var lFile = ProductDataModel.GetHoldingsExport(pModel);
var lResult = new HttpResponseMessage(HttpStatusCode.OK);
lResult.Content = new ByteArrayContent(lFile);
lResult.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
{
FileName = "HoldingsGridExport.xls"
};
lResult.Content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
return lResult;
}
// client side api
static getHoldingsExport({ UserConfigurationID, UserID, Configurations, ViewName, SortModel, FilterModel, UserConfigType, IsDefault, LastPortfolioSearchID = null, ProductId }) {
const filterModel = JSON.stringify(FilterModel); // saving as string as this model is dynamically generated by grid out of my control
const sortModel = JSON.stringify(SortModel);
let params = JSON.stringify({
UserConfigurationID,
UserID,
Configurations,
ViewName,
filterModel,
sortModel,
UserConfigType,
IsDefault,
LastPortfolioSearchID,
ProductId
});
return axiosInstance.post("/api/HoldingsExport", params);
}
// client side app call to get file
HoldingsApi.getHoldingsExport(config)
.then(function(response) {
debugger;
let test = response;
})
.catch(error => {
toastr.error('Failed to get export.');
});
This is how I've achieved file downloads by POSTing via Axios:
Axios.post("YOUR API URI", {
// include your additional POSTed data here
responseType: "blob"
}).then((response) => {
let blob = new Blob([response.data], { type: extractContentType(response) }),
downloadUrl = window.URL.createObjectURL(blob),
filename = "",
disposition = response.headers["content-disposition"];
if (disposition && disposition.indexOf("attachment") !== -1) {
let filenameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/,
matches = filenameRegex.exec(disposition);
if (matches != null && matches[1]) {
filename = matches[1].replace(/['"]/g, "");
}
}
let a = document.createElement("a");
if (typeof a.download === "undefined") {
window.location.href = downloadUrl;
} else {
a.href = downloadUrl;
a.download = filename;
document.body.appendChild(a);
a.click();
}
}).catch((error) => {
// ...
});
Just in case the above solution does not serve you well, here is how I was able to download videos hosted in AWS S3 buckets:
const handleDownload = () => {
const link = document.createElement("a");
link.target = "_blank";
link.download = "YOUR_FILE_NAME"
axios
.get(url, {
responseType: "blob",
})
.then((res) => {
link.href = URL.createObjectURL(
new Blob([res.data], { type: "video/mp4" })
);
link.click();
});
};
And I trigger the handleDownload function from a button's onClick.
The url in the function is the video's URL from the S3 bucket.
So I am looking to model our existing Redis data in Aerospike. One requirement we have is to be able to get all the keys for a given user. For example, say we have keys such as <id>:<timestamp>. At some point I need to get all keys for a given id, which would require something like a prefix search across all keys in the Aerospike namespace (which are indexed) to find the values for all <id>:<timestamp> keys. I would like to know whether this is possible, and if so, how.
You cannot do a query on key prefix directly. The server only stores the key digest, so the key value (<id>:<timestamp> in your case) doesn't get indexed.
The way to model this would be to add the <id> part of your key as a separate record bin. Then you can index that bin and run a query on it.
Here is a simple example - it's using the Aerospike Node.js client but the concept is the same no matter what client you prefer:
const Aerospike = require('aerospike')
const ns = 'test'
const set = 'demo'
// connect to cluster and create index on 'id' bin
var client = Aerospike.client()
client.connect((err) => {
assertOk(err, 'connecting to cluster')
createIndex('id', 'id_idx', Aerospike.indexDataType.STRING, () => {
// create a new sample record
var userId = 'user1'
var ts = new Date().getTime()
var key = new Aerospike.Key(ns, set, `${userId}:${ts}`)
var record = { id: userId, value: Math.random() }
client.put(key, record, (err) => {
assertOk(err, 'write record')
// query for records with matching 'id'
var query = client.query(ns, set)
query.where(Aerospike.filter.equal('id', userId))
var stream = query.foreach()
stream.on('error', (error) => assertOk(error, 'executing query'))
stream.on('end', () => client.close())
stream.on('data', (record, meta, key) => {
console.log(record)
})
})
})
})
function assertOk (err, message) {
if (err) {
console.error('ERROR: %s - %s', message, err)
process.exit(1)
}
}
function createIndex (bin, name, datatype, callback) {
var index = {
ns: ns,
set: set,
bin: bin,
index: name,
datatype: datatype
}
client.createIndex(index, (err, job) => {
assertOk(err, 'creating index')
job.waitUntilDone(100, (err) => {
assertOk(err, 'creating index')
callback()
})
})
}
Hope this helps!