I have a database with a queues table that holds queue names and ids, plus the two queue tables themselves (queue 1 and queue 2).
Now I have a function that is supposed to add orders to the right queue, but it's only adding to the second queue, even though I'm already using a for loop to change the queue that should be used.
Here's the code:
for (int i = 0; i < tmpQueuesIds.length; i++) {
  await http.post(GlobalState.ADDTOQUEUE, body: {
    'queueId': tmpQueuesIds[i].toString(),
    'timeinmin': tmpTimes[tmpQueuesIds[i]].toString(),
    'resId': _globalState.get('resId').toString(),
    'userId': userId.toString(),
  });
  print("QueueId: " + tmpQueuesIds[i].toString());
  print("OrderTime: " + tmpTimes[tmpQueuesIds[i]].toString());
  Future.delayed(Duration(milliseconds: 500));
}
And here's the output:
I/flutter ( 3238): QueueId: 1
I/flutter ( 3238): OrderTime: 6
I/flutter ( 3238): QueueId: 2
I/flutter ( 3238): OrderTime: 10
As you can see, I have two queues and the values are being changed, but it's inserting the value 10 twice into queue 2; it should be inserting value 6 into queue 1 and value 10 into queue 2.
And here's the API code:
try {
    router.post('/', (req, res, next) => {
        queueId = req.body.queueId;
        resId = req.body.resId;
        timeinmin = req.body.timeinmin;
        userId = req.body.userId;
        var orderId;
        sqlQuery = `SELECT id FROM orders where userid = ${userId} and isdone = false`;
        con.query(sqlQuery, (err, rows) => {
            try {
                lastIndex = rows['rows'].length - 1;
                orderId = rows['rows'][lastIndex]['id'];
            } catch {
                console.log('something wrong with setting orderId or lastIndex value in addToQueue.js');
            }
            if (!err) {
                sqlQuery2 = `SELECT name FROM queues where id = ${queueId}`;
                try {
                    con.query(sqlQuery2, function (err, rows) {
                        try {
                            queueName = rows['rows'][0]['name'];
                        } catch {
                            console.log('something wrong with setting queueName value in addToQueue.js');
                        }
                        if (!err) {
                            sqlQuery2 = `INSERT INTO ${queueName} (timeinmin, resid, orderid) VALUES(${timeinmin}, ${resId}, ${orderId})`;
                            try {
                                con.query(sqlQuery2, function (err, rows) {
                                    if (!err) {
                                        res.sendStatus(201);
                                        console.log('added to queue');
                                    } else {
                                        console.error("Failed to add to queue");
                                        console.log(err);
                                        res.sendStatus(202);
                                    }
                                });
                            } catch {
                                console.log('Something went down.');
                            }
                        } else {
                            console.error("Failure");
                            console.log(err);
                            res.sendStatus(202);
                        }
                    });
                } catch {
                    console.log('Something went down.');
                }
            } else {
                console.log(err);
                res.sendStatus(202);
            }
        });
    });
} catch {
    console.log('Error');
}
help me please, thanks.
I can't find any problems with your code.
My advice is to try restarting your IDE, and if the problem still persists, try restarting your whole computer.
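One thing that may be worth double-checking, offered only as a hedged aside and not as a confirmed fix: in the route handler, queueId, resId, timeinmin, userId, sqlQuery, sqlQuery2, lastIndex and queueName are assigned without var/let/const, so they become implicit globals shared by every request the process handles, and on the Flutter side the Future.delayed(...) call is never awaited, so it adds no delay at all. A minimal sketch of how the start of the handler could look with local declarations and a parameterized query (this assumes con is a node-postgres client, which the rows['rows'] access suggests):

router.post('/', (req, res, next) => {
    // Local declarations, so concurrent requests cannot overwrite each other's values.
    const { queueId, resId, timeinmin, userId } = req.body;

    // Parameterized query (node-postgres placeholder syntax) instead of string interpolation.
    const ordersSql = 'SELECT id FROM orders WHERE userid = $1 AND isdone = false';
    con.query(ordersSql, [userId], (err, result) => {
        if (err) return res.sendStatus(202);
        const lastRow = result.rows[result.rows.length - 1];
        const orderId = lastRow && lastRow.id;
        // ...continue with the queues lookup and the INSERT exactly as in the original code,
        // keeping every variable declared with const or let inside this callback.
    });
});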
I am transferring attachments from Zoho to NetSuite, but I am facing problems while attaching them to an opportunity or any other object. I have already uploaded the file to the File Cabinet in NetSuite and tried to bind it with the record's notes, but that doesn't work: it only adds the note to the record, with no sign of any file in the file option.
Thank you.
You would use the record.attach function. You would need the internal id of the file and of the transaction. In SS1 (using nlapiAttachRecord) it was important to list the file arguments first. The SS2 syntax makes that clearer:
record.attach({
    record: {
        type: 'file',
        id: fileid
    },
    to: {
        type: 'transaction',
        id: transactionid
    }
});
/**
 * @NApiVersion 2.1
 * @NScriptType MapReduceScript
 * @NModuleScope SameAccount
 */
/**
 * In this script I am using a Map/Reduce script to process and attach multiple files from
 * the NetSuite File Cabinet, so that it never goes out of governance.
 */
define(['N/record', 'N/query'],
    (record, query) => {
        const getInputData = (getInputDataContext) => {
            try {
                /**
                 * Query for getting transaction ID & other header detail of record.
                 */
                let transQuery = "SELECT custrecord_rf_tid as tid, custrecord_rf_fid as fid, id FROM customrecord_rflink where custrecord_rf_comp <> 'T' and custrecord_rf_type = 11";
                let transQueryResult = runSuiteQuery(transQuery);
                if (transQueryResult.length > 0) {
                    log.debug("Count of record left to process--->", transQueryResult.length);
                    return transQueryResult;
                } else { // In case no transaction was left to transform.
                    log.debug({ title: "No Remaining Transaction!" });
                    return 1;
                }
            } catch (e) {
                log.error({ title: "Error inside getinput data.", details: [e.message, e.stack] });
            }
        }

        const map = (mapContext) => {
            try {
                let mapData = JSON.parse(mapContext.value);
                log.debug({ title: "mapData after parse", details: mapData });
                let staginRecId = Number(mapData.id);
                let fileId = Number(mapData.fid);
                let billId = Number(mapData.tid);
                let outputVal = attachfile('file', fileId, 'inventoryadjustment', billId);
                let staginRec;
                if (outputVal === true) {
                    staginRec = record.submitFields({
                        type: 'customrecord_rflink',
                        id: staginRecId,
                        values: {
                            'custrecord_rf_comp': true
                        }
                    });
                    log.debug("record saved with id-->", staginRecId);
                } else {
                    log.debug("record saving failed with id-->", staginRecId);
                }
            } catch (e) {
                log.error({ title: "Error in Map", details: [e.message, e.stack] });
            }
        }

        const reduce = (reduceContext) => {
        }

        const summarize = (summarizeContext) => {
            log.debug('Summarize completed');
        }

        function runSuiteQuery(queryString) {
            log.debug("Query", queryString);
            let resultSet = query.runSuiteQL({
                query: queryString
            });
            log.debug("Query wise Data", resultSet.asMappedResults());
            if (resultSet && resultSet.results && resultSet.results.length > 0) {
                return resultSet.asMappedResults();
            } else {
                return [];
            }
        }

        function attachfile(recType, recId, recTypeTo, recIdTo) {
            record.attach({
                record: {
                    type: recType,
                    id: recId
                },
                to: {
                    type: recTypeTo,
                    id: recIdTo
                }
            });
            return true;
        }

        return { getInputData, map, reduce, summarize };
    });
In the code below, when I run in debug mode with a break-point at this line: content.push(data.Body.toString()); I can see that data is inserted into the content array.
However, when I run the code normally, content comes back empty.
How can I get it to populate the array for downstream use?
var params = { Bucket: "thebucket", Prefix: "theprefix/" };
var content = [];

function getS3Data() {
    var s3 = new aws.S3();
    s3.listObjects(params, function (err, data) {
        if (err) throw err; // an error occurred
        else {
            var i;
            for (i = 0; i < data.Contents.length; i++) {
                var currentValue = data.Contents[i];
                if (currentValue.Key.endsWith(params.Prefix) == false) {
                    var goParams = { Bucket: params.Bucket, Key: currentValue.Key };
                    s3.getObject(goParams, function (err, data) {
                        if (err) throw err; // error
                        content.push(data.Body.toString());
                    });
                }
            }
        } // else
    }); // listObjects
} // getS3Data

getS3Data();
console.log(content); // prints empty here when run in non-debug.
The line:
console.log(content)
is being executed before the line:
content.push(data.Body.toString());
The function you are passing as the 2nd argument to s3.listObjects is executed asynchronously. If you want to log out content, you need to do it within the callback function, meaning:
s3.listObjects(params, function (err, data) {
    if (err) throw err;
    else {
        // ...
        console.log(content);
    }
});
A better approach would be to implement getS3Data with a Promise, so you can run code after the object listing is done for sure.
function getS3Data() {
    // `s3` and `params` are assumed to be the ones defined in the question.
    return new Promise((resolve, reject) => {
        s3.listObjects(params, function (err, data) {
            if (err) {
                reject(err);
            } else {
                const promises = [];
                for (let i = 0; i < data.Contents.length; i++) {
                    const currentValue = data.Contents[i];
                    if (currentValue.Key.endsWith(params.Prefix) == false) {
                        const goParams = { Bucket: params.Bucket, Key: currentValue.Key };
                        promises.push(new Promise((res, rej) => {
                            s3.getObject(goParams, function (err, data) {
                                if (err) {
                                    rej(err); // error
                                } else {
                                    res(data.Body.toString());
                                }
                            });
                        }));
                    }
                }
                Promise.all(promises).then(resolve);
            }
        });
    });
}
getS3Data()
    .then(result => { // this will actually be `content` from your code example
        console.log(result);
    }).catch(error => {
        console.error(error);
    });
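For completeness, here is a minimal sketch of the same flow using async/await and the SDK's .promise() helper (this assumes the AWS SDK for JavaScript v2, as used in the question), which avoids the manual Promise wiring entirely:

const aws = require('aws-sdk');

const params = { Bucket: "thebucket", Prefix: "theprefix/" };

async function getS3Data() {
    const s3 = new aws.S3();
    // List the keys under the prefix, skipping the prefix "folder" entry itself.
    const listing = await s3.listObjects(params).promise();
    const keys = listing.Contents
        .map(obj => obj.Key)
        .filter(key => !key.endsWith(params.Prefix));
    // Download all objects in parallel and wait for every body.
    const objects = await Promise.all(
        keys.map(Key => s3.getObject({ Bucket: params.Bucket, Key }).promise())
    );
    return objects.map(obj => obj.Body.toString());
}

getS3Data()
    .then(content => console.log(content)) // populated here, after the downloads finish
    .catch(err => console.error(err));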
Node.js' documentation has an example very similar to the problem you are experiencing:
Dangers of Mixing Blocking and Non-Blocking Code
The issue arises because the variable content is not yet set when getS3Data returns: the work inside it is asynchronous, so content is only populated some time later. But your call to console.log(content); executes immediately after getS3Data returns, so at that point content has not been set yet.
You can test that by adding an extra log:
s3.getObject(goParams, function (err, data) {
    if (err) throw err; // error
    content.push(data.Body.toString());
    console.log("Content has been assigned");
});
And then change the bottom to:
getS3Data();
console.log("getS3Data has finished", content);
It's likely you'll get the messages in this order:
getS3Data has finished
Content has been assigned
This question relates to a Node.js mssql API.
I've recently updated my code to use a SQL.ConnectionPool instead of sql.connect, which, combined with an async / await function, allowed me to get around connection.close() errors.
In my previous (OLD) executeQuery function, I was able to pass an array which I could push values into, to use with request.input(name, value).
Function call example:
app.get('/api/route/:id', function (req, res) {
    var id = req.params.id;
    let x = [];
    if (id != null && id != NaN) {
        x.push({
            Name: 'id', Value: id
        });
        var query = `SELECT * from [Table] where ID = @id`;
        executeQuery(res, query, x);
    } else {
        res.send(500);
    }
});
OLD Function:
var executeQuery = function (res, query, arr) {
    sql.connect(dbConfig, function (err) {
        if (err) {
            console.log('Error while connecting to the database: ' + err);
            res.send(err);
        } else {
            // Create the request object
            var request = new sql.Request();
            if (arr != null) {
                if (arr.length > 0) {
                    for (var obj of arr) {
                        request.input(obj.Name, obj.Value);
                    }
                }
            }
            request.query(query, function (err, rs) {
                if (err) {
                    sql.close();
                    console.log('Error while querying the database : ' + err);
                    res.send(err);
                } else {
                    sql.close();
                    console.log(rs);
                    res.send(rs);
                }
            });
        }
    });
};
NEW Function:
var executeQuery = async function (res, query, arr) {
    const pool = new sql.ConnectionPool(dbConfig);
    pool.on('error', err => {
        console.log('sql errors ', err);
    });
    try {
        await pool.connect();
        let result = await pool.request().query(query);
        console.log('success');
        res.send(result);
        return { success: result };
    } catch (err) {
        console.log('error');
        console.log(err);
        res.send(err);
        return { err: err };
    } finally {
        pool.close();
    }
};
Question
How do I go about achieving the same request.input process with a ConnectionPool as I did with my previous function (like the below)?
var request = new sql.Request();
if (arr != null) {
    if (arr.length > 0) {
        for (var obj of arr) {
            request.input(obj.Name, obj.Value);
        }
    }
}
Thank you.
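A minimal sketch of one way to do this, assuming the node-mssql API where pool.request() returns a Request object that accepts .input() just like new sql.Request() did, so the same loop over arr can be reused inside the NEW function:

var executeQuery = async function (res, query, arr) {
    const pool = new sql.ConnectionPool(dbConfig);
    pool.on('error', err => console.log('sql errors ', err));
    try {
        await pool.connect();
        const request = pool.request(); // per-query Request bound to the pool
        if (arr != null && arr.length > 0) {
            for (const obj of arr) {
                request.input(obj.Name, obj.Value); // same named parameters (@id, etc.) as before
            }
        }
        const result = await request.query(query);
        console.log('success');
        res.send(result);
        return { success: result };
    } catch (err) {
        console.log(err);
        res.send(err);
        return { err: err };
    } finally {
        pool.close();
    }
};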
I have an emails object that contains an array in a mongodb database. However, when I try to use $set to make the array empty it doesn't work. How am I supposed to clear the array?
exports.clearEmails = function (req, res, next) {
    var listId = req.params.id;
    var errors = req.validationErrors();
    if (errors) {
        return res.status(400).send(errors);
    }
    EmailList.update({ '_id': listId }, { $set: { 'emails': [] } }, function (err, results) {
        if (err) {
            return res.status(400).send(err);
        } else {
            return res.status(200).send(results);
        }
    });
};
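For reference, a minimal sketch of the same operation written with updateOne, the newer replacement for the deprecated update() call (the EmailList model and route shape are taken from the question; everything else here is an assumption, and $set with an empty array remains the way to clear the field):

exports.clearEmails = function (req, res, next) {
    const listId = req.params.id;
    // updateOne replaces the deprecated update(); $set with [] still empties the array.
    EmailList.updateOne({ _id: listId }, { $set: { emails: [] } })
        .then(results => res.status(200).send(results))
        .catch(err => res.status(400).send(err));
};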
Promise.try(function () {
    return Promise.all(splitup); // [Show, me, stocks, for, Google, and, Microsoft]
}).each(function (item) { // Looping through entities, [Show] [me] [stocks] ...
    alchemyapi.entities('text', item, { 'sentiment': 1 }, function (response) {
        if (response.entities) { // An entity is found, ex. Microsoft
            if (response.entities[0].type === "Company") {
                requestBody.push(item);
                console.log("Item was found, added " + item);
            } else {
                console.log(item + " Is not a company");
            }
        } else { // no entity found for that one word
            console.log("No entity found for " + item);
        }
    });
}).then(function (response) {
    // send requestBody when loop is completed.
});
I start by returning an array of strings splitup so I can loop through each element on line 3.
Let's say the splitup array looks like: [Apple, And, Mexico]
Apple is a company, so if(response.entities) returns true; it then checks the JSON response to see if it is a company, that check returns true, and it's added to the new requestBody array I'm building up.
Next, the word 'And' returns false on if(response.entities), so it goes to the else statement.
Next, let's pick Mexico: it'll return true for if(response.entities) but false on if(response.entities[0].type === "Company").
My question is, I'd like to return the new requestBody array when it has finished looping through each item, but I'm not entirely sure how I can tell when the loop is complete, and when to return requestBody.
You need to use Promise.filter instead of Promise.each. Promise.filter filters the given array into another using the filterer function passed to it.
So when you encounter a company ('Apple') you resolve with its value; if it's anything else ('Mexico' and 'And') you resolve with false.
Promise.filter(splitup, function (item) { // Looping through entities, [Show] [me] [stocks] ...
    return new Promise(function (resolve, reject) {
        alchemyapi.entities('text', item, { 'sentiment': 1 }, function (response) {
            if (response.entities) { // An entity is found, ex. Microsoft
                if (response.entities[0].type === "Company") {
                    console.log("Item was found, added " + item);
                    return resolve(item);
                } else {
                    console.log(item + " Is not a company");
                    return resolve(false); // resolve with false so the item is filtered out
                }
            } else { // no entity found for that one word
                console.log("No entity found for " + item);
                return resolve(false); // resolve with false so the item is filtered out
            }
        });
    });
}).then(function (requestBody) {
    // send requestBody when loop is completed.
});
Ok too late :). Here was my result:
var alchemyapi = require('alchemy-api');
var Promise = require('bluebird');
var alchemyapi = new alchemyapi(<YOUR_KEY>);
var test = ['Microsoft', 'and', 'Apple'];

Promise.filter(test, function (item) {
    return getCompanyName(item).then(function () {
        return true;
    }, function (reason) {
        console.log(reason.message);
        return false;
    });
}).then(function (requestBody) {
    console.log(requestBody);
});

function getCompanyName(item) {
    return new Promise(function (resolve, reject) {
        alchemyapi.entities(item, { sentiment: 1 }, function (err, response) {
            if (err) return reject(err);
            if (response.entities.length > 0) { // An entity is found, ex. Microsoft
                if (response.entities[0].type === "Company") {
                    resolve(item);
                } else {
                    reject(new Error(item + " Is not a company"));
                }
            } else { // no entity found for that one word
                reject(new Error("No entity found for " + item));
            }
        });
    });
}