I'm using the Snowflake NodeJS connector, and executing the same query several times. Snowflake's query history shows me the results are being returned from cache, as expected (so ~40ms). However, measuring on the client shows responses are taking up to 6s. What could be some causes of the latency, and how would I go about debugging this?
const startTime = performance.now();
const conn = await this.connection;
return new Promise((resolve, reject) => {
conn.execute({
sqlText,
complete: (err, stmt, rows) => {
console.log(
'Finished query, took: ' + (performance.now() - startTime)
);
err ? reject(new Error(err.message)) : resolve(rows);
},
});
});
const startTime = performance.now();
const conn = await this.connection;
return new Promise((resolve, reject) => {
conn.execute({
sqlText,
complete: (err, stmt, rows) => {
console.log(
'Finished query, took: ' + (performance.now() - startTime)
);
err ? reject(new Error(err.message)) : resolve(rows);
},
});
});
will give large times, because startTime is taken before this.connection has even been awaited, so the measurement includes waiting for the connection as well as the query itself.
If you add another timer inside the promise, like this:
const startTime = performance.now();
const conn = await this.connection;
return new Promise((resolve, reject) => {
const execStartTime = performance.now();
conn.execute({
sqlText,
complete: (err, stmt, rows) => {
console.log(
'Finished query, total took: ' + (performance.now() - startTime) + ' exec took: ' + (performance.now() - execStartTime )
);
err ? reject(new Error(err.message)) : resolve(rows);
},
});
});
now you are measuring the round trip of the call to Snowflake plus the ~40ms of Snowflake server time.
The Snowflake answer time is how long it takes the server to "answer the question after getting the request", i.e. match a cache hit. But how long it takes your client to make the request, fetch the data, parse it, and declare the transfer done will be much longer.
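If you want to narrow down where the time goes, one option is to time each phase separately: how long you wait for the connection versus the execute round trip, and also log how many rows come back, since fetching and parsing a large result set on the client can easily dominate a ~40ms server-side time. A rough sketch, reusing the same code shape as above (the extra timers and the row-count log are just for illustration):
const t0 = performance.now();
const conn = await this.connection;          // phase 1: waiting for the connection
const t1 = performance.now();
return new Promise((resolve, reject) => {
  conn.execute({
    sqlText,
    complete: (err, stmt, rows) => {
      const t2 = performance.now();          // phase 2: execute round trip + result fetch
      console.log(
        'connection: ' + (t1 - t0) + 'ms, ' +
        'execute: ' + (t2 - t1) + 'ms, ' +
        'rows: ' + (rows ? rows.length : 0)
      );
      err ? reject(new Error(err.message)) : resolve(rows);
    },
  });
});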
You could use something like Fiddler or Wireshark to watch the transactions (if they are being made from a local machine).
Also, are you "experiencing it as slow" and thus looking for slowness, or doing some bad measurements and being shocked they are not zippy fast?
Related
I am working in ReactJS and one of the main aspects of our project is the ability to upload a scorecard and have all of its results parsed and placed into objects. However, due to the nature of these PDFs that get uploaded, there's a LOT of information, an average of 12-14 pages.
Most of the information is irrelevant; I usually only need pages 5-7, but users will be users, and they upload all 12.
I am using the pdfParser API, which is very good; we're not looking for replacements for that. However, due to how large the file is, if I am somewhere with only a half-decent connection, I am hit with a 504 error since the process takes so long. If I have a good to great connection, there's no issue.
This being said, I have two questions:
Is there a way to extend the amount of time that needs to elapse before my computer gives up on the process?
Is there a way to parse only SOME of the pages that get submitted?
The relevant code will be shown below...
var url = 'https://pdftables.com/api?key=770oukvvx1wl&format=xlsx-single';
const pdfToExcel = (pdfFile) => {
var req = request.post({encoding: null, url: url}, async function (err, resp, body) {
if (!err && resp.statusCode == 200) {
fs.writeFile(`${pdfFile.path}.xlsx`, body, function(err) {
if (err) {
console.log('error writing file');
}
});
} else {
console.log('error retrieving URL');
};
});
var form = req.form();
form.append('file', fs.createReadStream(`./${pdfFile.path}`));
}
const parseExcel = async (file) => {
let workSheetsFromFile;
if (file.path.search(".xlsx") === -1) {
const filePath = await path.resolve(`./${file.path}.xlsx`)
workSheetsFromFile = await xlsx.parse(`./${file.path}.xlsx`);
await fs.unlinkSync(`./${file.path}`)
await fs.unlinkSync(filePath)
return workSheetsFromFile[0].data
}
if (file.path.search(".xlsx") !== -1) {
const filePath = await path.resolve(`./${file.path}`)
workSheetsFromFile = await xlsx.parse(`./${file.path}`);
await fs.unlinkSync(filePath)
return workSheetsFromFile[0].data
}
}
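Regarding the second question (parsing only some of the pages): the conversion API gets whatever file you send it, so one possible approach is to trim the PDF down to the pages you actually need before uploading, which also makes the request smaller and faster. This is only a sketch and assumes a separate library such as pdf-lib (not something used in the code above); the page indices and file names are illustrative:
const fs = require('fs');
const { PDFDocument } = require('pdf-lib'); // assumed dependency, not part of the original code

const extractPages = async (inputPath, outputPath, pageIndices) => {
  // load the uploaded PDF and copy only the wanted pages into a new document
  const srcDoc = await PDFDocument.load(fs.readFileSync(inputPath));
  const outDoc = await PDFDocument.create();
  const pages = await outDoc.copyPages(srcDoc, pageIndices);
  pages.forEach((page) => outDoc.addPage(page));
  fs.writeFileSync(outputPath, await outDoc.save());
};

// e.g. keep only pages 5-7 (zero-based indices 4-6) before handing the file to pdfToExcel
// await extractPages(`./${pdfFile.path}`, `./${pdfFile.path}-trimmed.pdf`, [4, 5, 6]);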
I am trying to make a poll command for a Discord bot in which the user chooses a number of options in the first command (i.e. '!poll 4') and then chooses the question and the options. I am having some issues getting the bot to wait for a response before it moves on to the next option in the loop. When I try to use await in the loop it says I cannot use await because it's not an async function, but it is an async function, I think. I am very inexperienced with this, so I am sure it is a simple error, or probably multiple. If anyone can give me advice on a way to make the loop work as intended and ask for each option, I would appreciate it. Also, is there a way to use if statements to addField on an embed? Here is my code:
const Discord = module.require('discord.js');
module.exports = {
name: 'poll',
async execute(message, args) {
function isNumber(n) { return !isNaN(parseFloat(n)) && !isNaN(n - 0) }
if(isNumber(args[1])){
if(args[1]<2) return message.channel.send('Please choose a higher number of options for the poll :)');
if(args[1]>10) return message.channel.send('Please choose a lower number of options for the poll :)');
const filter = response => {
if(!response.author.bot) return response;
};
var question;
var options;
message.channel.send('What question would you like to ask?').then(() => {
message.channel.awaitMessages(filter, { max: 1, time: 15000})
.then(collected => {
question = `${collected.first()}?`;
message.channel.send('Question: ' + question);
for (var i = 0; i < args[1]; i++) {
message.channel.send('What is option ' + (i + 1) + '?').then(() => {
message.channel.awaitMessages(filter, { max: 1, time: 15000})
.then(collected => {
options[i] = collected.first;
message.channel.send(`Option ${i}: ${options[i]}`);
})
.catch(collected => {
message.channel.send('Poll has timed out.');
});
})
}
})
.catch(collected => {
message.channel.send('Poll has timed out.');
});
const embed = new Discord.MessageEmbed()
.setColor(3447003)
.setTitle(question)
.setDescription('choose an option')
/*
if (options[0]) .addField('1️⃣:' + option[0])
if (options[1]) .addField('2️⃣:' + option[1])
if (options[2]) .addField('3️⃣:' + option[2])
if (options[3]) .addField('4️⃣:' + option[3])
if (options[4]) .addField('5️⃣:' + option[4])
if (options[5]) .addField('6️⃣:' + option[5])
if (options[6]) .addField('7️⃣:' + option[6])
if (options[7]) .addField('8️⃣:' + option[7])
if (options[8]) .addField('9️⃣:' + option[8])
if (options[9]) .addField('🔟:' + option[9])
*/
message.channel.send(embed).then(embedMessage => {
if (options[0]) embedMessage.react('1️⃣');
if (options[1]) embedMessage.react('2️⃣');
if (options[2]) embedMessage.react('3️⃣');
if (options[3]) embedMessage.react('4️⃣');
if (options[4]) embedMessage.react('5️⃣');
if (options[5]) embedMessage.react('6️⃣');
if (options[6]) embedMessage.react('7️⃣');
if (options[7]) embedMessage.react('8️⃣');
if (options[8]) embedMessage.react('9️⃣');
if (options[9]) embedMessage.react('🔟');
});
});
}
}
}
Since you say you are trying to use await in your loop, let me take the function it is contained in out of your snippet, format it a little, and try to do some explaining. Disclaimer: I am no expert, so I am learning as well.
.then(collected => {
question = `${collected.first()}?`;
message.channel.send(`Question: ${question}`);
for (var i = 0; i < args[1]; i++) {
message.channel.send(
`What is option ${i + 1}?`
).then(() => {
message.channel.awaitMessages(filter, {
"max": 1,
"time": 15000,
}).then(collected => {
options[i] = collected.first;
message.channel.send(`Option ${i}: ${options[i]}`);
}).catch(collected => {
message.channel.send("Poll has timed out.");
});
});
}
});
But before that: as long as the callback in the first inner .then() returns the .awaitMessages() Promise, you can chain the second inner .then() in the outer scope to avoid nesting too deeply, and leave a single .catch() at the end. On that note, it would probably be more accurate to call the catch's parameter something like error. So here's the new snippet:
.then(collected => {
question = `${collected.first()}?`;
message.channel.send('Question: ' + question);
for (var i = 0; i < args[1]; i++) {
message.channel.send(
`What is option ${i + 1}?`
).then(() => {
return message.channel.awaitMessages(filter, { // Return this Promise so the next .then() receives collected
"max": 1,
"time": 15000,
});
}).then(collected => { // Change .then() chaining
options[i] = collected.first;
message.channel.send(`Option ${i}: ${options[i]}`);
}).catch(error => { // Change parameter name
message.channel.send("Poll has timed out.");
});
}
})
What's happening now is that each iteration runs one after the other immediately. You .send() a whole bunch of messages, each of which returns a Promise, and off that Promise you pass a callback function to .then(), which runs once each Promise resolves into a Message. That callback returns the result of .awaitMessages() (note the return added above; an arrow function with a braced body does not return anything implicitly), which is also a Promise, and once that resolves, the callback in the next .then() runs with whatever the previous Promise resolved to passed in as an argument, and so on.
Alright, so you want the entire Promise chain to finish processing and resolve before proceeding to the next iteration, right? You can use the await keyword to suspend progress in the relevant anonymous function until its associated promise-based operation either resolves or rejects. The catch is that that function has to be marked with the async keyword, and in your code that is not actually the case; you are just making use of Promises and callback functions (regarding "but it is an async function I think"). So, let's add both of the aforementioned keywords:
.then(async collected => { // Mark as async
question = `${collected.first()}?`;
message.channel.send('Question: ' + question);
for (var i = 0; i < args[1]; i++) {
await message.channel.send( // Wait for this entire Promise chain to resolve before proceeding
`What is option ${i + 1}?`
).then(() => {
return message.channel.awaitMessages(filter, { // Return this Promise so the next .then() receives collected
"max": 1,
"time": 15000,
});
}).then(collected => {
options[i] = collected.first(); // first is a method, so call it to get the Message
message.channel.send(`Option ${i}: ${options[i]}`);
}).catch(error => {
message.channel.send("Poll has timed out.");
});
}
})
That should cause your desired behaviour, though my edits may have syntax errors as I have not run it myself. If I got something wrong, please do comment.
You can read more here:
Using async-await
Using Promises
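As for the other question about conditionally adding fields to the embed: the commented-out block can't work as written, since addField is a method you call on the embed, and it takes a name and a value. A sketch of one way to do it with discord.js v12's MessageEmbed#addField, wherever you end up building the embed (after the answers have been collected), assuming options is initialized as an array (e.g. const options = [];) holding the collected answers:
const emojis = ['1️⃣', '2️⃣', '3️⃣', '4️⃣', '5️⃣', '6️⃣', '7️⃣', '8️⃣', '9️⃣', '🔟'];
const embed = new Discord.MessageEmbed()
    .setColor(3447003)
    .setTitle(question)
    .setDescription('choose an option');
// add one field per option that was actually collected; missing ones are skipped
options.forEach((option, i) => {
    if (option) embed.addField(emojis[i], `${option}`);
});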
I am asking this because sometimes (rarely) my query does multiple additions.
I am opening the db at the top of every page:
var db = SQLite.openDatabase({name:'appdb.db',createFromLocation: '~appdb.db'})
My query is:
db.transaction((tx) => {
tx.executeSql('INSERT INTO messages (chatID,messageID,senderID,message,uri,type,date)'+
'values(?,?,?,?,?,?,?)',[this.state.chatID,data.messageID,data.senderID,data.message,data.uri,data.type,data.date], (tx, results) => {
});
});
I am thinking about converting to this for every query:
import db from '../Classes/db';
db.open();
db.transaction((tx) => {
tx.executeSql('INSERT INTO messages (chatID,messageID,senderID,message,uri,type,date)'+
'values(?,?,?,?,?,?,?)',[this.state.chatID,data.messageID,data.senderID,data.message,data.uri,data.type,data.date], (tx, results) => {
});
});
db.close();
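One thing to keep in mind with that approach: transaction() is asynchronous, so a db.close() placed straight after it can run before the insert has committed. A sketch of closing only once the transaction has finished, assuming the WebSQL-style transaction(callback, errorCallback, successCallback) signature that libraries like react-native-sqlite-storage expose:
import db from '../Classes/db';

db.open();
db.transaction((tx) => {
    tx.executeSql('INSERT INTO messages (chatID,messageID,senderID,message,uri,type,date)' +
        'values(?,?,?,?,?,?,?)',
        [this.state.chatID, data.messageID, data.senderID, data.message, data.uri, data.type, data.date]);
}, (error) => {
    console.log('transaction error: ' + error.message);
    db.close();
}, () => {
    // success callback: the insert has committed, so it is safe to close now
    db.close();
});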
I have a NodeJS application which is my server and I created a Database class to help me handle querying my SQL DB. If I send requests a second apart from each other, everything runs fine, no problems. But if I start spamming requests to my server, it crashes due to Error: Cannot enqueue Quit after invoking quit.
Here's my query function inside my Database class
static query(query: string): Promise<any> {
console.log('Query: ' + query);
return new Promise((resolve, reject) => {
this.connect().then(success => {
sqlConn.query(query, (err, results) => {
if (err) { return reject(err);
} else {
return resolve(results);
}
});
}).catch(err => {
return reject(err);
}).then( () => {
if (sqlConn.state !== 'disconnected') {
sqlConn.end();
}
});
});
};
and here's the this.connect() function
static connect(): Promise<any> {
return new Promise((resolve, reject) => {
sqlConn = mysql.createConnection(this.connectionData);
sqlConn.connect(err => {
if (err) { return reject(err); } else {
return resolve('SQL connection established');
}
});
});
};
I'm pretty sure the problem is that sometimes it would still be processing one query, and then another query comes in before the first one finishes, so it would call sqlConn.end() twice, even when it's already disconnected? Any help is greatly appreciated...
> Main goal is for the query to wait till it's 100% done before it runs the next one.
You can simplify your code by using the npm module mysql and its built-in connection pool.
From the documentation:
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit : 10,
host : 'example.org',
user : 'bob',
password : 'secret',
database : 'my_db'
});
pool.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
if (error) throw error;
console.log('The solution is: ', results[0].solution);
});
You can, of course, create your own function that promisifies that call like this:
function query (sql) {
  return new Promise((resolve, reject) => {
    pool.query(sql, (error, results, fields) =>
      error ? reject(error) : resolve({ results, fields })
    );
  });
}
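Then callers can simply await it; for example (just a usage sketch):
async function main () {
  const { results } = await query('SELECT 1 + 1 AS solution');
  console.log('The solution is: ', results[0].solution);
}

main().catch(console.error);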
If you really want to use this approach then please use the eachSeries function of the async library.
var async = require('async');
var chunkedArray = [];

async.eachSeries(chunkedArray, startUpload, endUpload);

function startUpload (data, cb) {
  // called for every single item in the array, one at a time;
  // call cb() (or cb(err)) when this item is done so the next one starts
  cb();
}

function endUpload (err) {
  // finally called once, after all items are processed (or on the first error)
}
This might help:-
https://caolan.github.io/async/docs.html#eachSeries
But I would rather suggest you use connection pooling, which puts less overhead on your DB and lets you use MySQL more efficiently than making multiple connections.
// Load module
var mysql = require('mysql');
// Initialize pool
var pool = mysql.createPool({
connectionLimit : 10,
host : '127.0.0.1',
user : 'root',
password : 'root',
database : 'db_name',
debug : false
});
module.exports = pool;
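Then, in any other file, require that module and run queries straight off the pool; it grabs and releases connections for you (a small usage sketch, assuming the snippet above is saved as, say, db.js):
// e.g. someOtherFile.js
var pool = require('./db');

pool.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
  if (error) throw error;
  console.log('The solution is: ', results[0].solution);
});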
I have connected MSSQL with node.js using the below code:
var sql = require('mssql')
var config ={
server:'xxxx',
database:'xxxx',
user:'xxx',
password:'xxx',
port:'xx'
};
sql.connect(dbconfig, function (err) {
if (err) console.log(err);
var sqlquery='';
const request = new sql.Request();
if(condition)
{
//query to the database and get the repo value
sqlquery='select * from verylargetable';
request.query(sqlquery, function (err, result) {
if (err) console.log(err)
var repo=result.recordset[0].Repo;
//query to the database and get the comm value
sqlquery="select commit from verylargetable where Repo='"+repo+"'";
request.query(sqlquery, function (err, result) {
if (err) console.log(err)
var comm=result.recordset[0].Comm;
if (result.recordset.length > 0)
{
//query to the database and update the table
sqlquery="UPDATE verylargetable set Repo='"+repo+"', WHERE Comm='"+comm+"'";
request.query(sqlquery, function (err,result){
if (err) console.log(err)
console.log("record(s) updated");
});
}
});
});
}
else
{
//query to the database and get the repo value
sqlquery='select * from verylargetable';
request.query(sqlquery, function (err, result) {
if (err) console.log(err)
var repo=result.recordset[0].Repo;
//query to the databaseto insert new record
sqlquery ="INSERT INTO verylargetable VALUES("+repo+"','"+comm+"',1)";
request.query(sqlquery, function (err, result) {
if (err) console.log(err)
});
});
}
});
Based on some conditions I have to execute queries. While executing, the queries update properly, but sometimes I am facing the below issue:
global connection already exists. call sql.close() first.
When I used sql.close() at the end, I couldn't initialize the connection the next time.
I am facing a "connection is closed" issue.
I have a separate method for database changes. I need to establish the connection at the start of the method and close it at the end. In between, I have to execute all the queries.
Please let me know how to open and close the connection properly.
sql.connect returns the connection; in order to close it, just call close() on the result.
var pool = sql.connect(dbconfig, function (err) {
    if (err) console.log(err);
    const request = new sql.Request();
    var sqlquery = 'select * from verylargetable';
    request.query(sqlquery, function (err, result) {
        if (err) console.log(err);
        // close only after the query has finished, otherwise the
        // connection is torn down while the query is still running
        pool.close();
    });
});
I was facing the same issue and solved it with the below implementation.
To handle "global connection already exists. call sql.close() first", we need to create a connection pool (mssql npm version 4.0.4):
const config = {
user: '**',
password: '**',
server: '** or localhost',
database: '**',
pool: {
max: 10,
min: 0,
idleTimeoutMillis: 30000
}}
router.get('/SQLConnection1', function (req, res, next) {
const pool1 = new sql.ConnectionPool(config, err => {
// ... error checks
// Query
pool1.request() // or: new sql.Request(pool1)
.query('SELECT ID,Name from Avatar', (err, result) => {
// ... error checks
console.dir(result)
res.json({'Result': result})
})
})
pool1.on('error', err => {
// ... error handler
console.log('Error ' + err);
})
});
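One thing to keep in mind with the snippet above: the ConnectionPool is created inside the route handler, so every request opens its own pool. A variation (only a sketch, using the same mssql 4.x API) is to create the pool once at module scope, reuse it for every request, and close it only when the app shuts down:
const pool1 = new sql.ConnectionPool(config);
const poolConnect = pool1.connect(); // start connecting once, at startup

router.get('/SQLConnection1', function (req, res, next) {
  poolConnect.then(() => {
    return pool1.request().query('SELECT ID,Name from Avatar');
  }).then(result => {
    console.dir(result);
    res.json({ 'Result': result });
  }).catch(err => {
    console.log('Error ' + err);
    next(err);
  });
});

process.on('SIGINT', () => {
  pool1.close(); // release the pool's connections on shutdown
});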