I have to run a query on SQL Server 2014 using the Node.js mssql package. I use the query below with two input parameters. When I execute the T-SQL code, the following error shows up:
RequestError: The conversion of a varchar data type to a datetime data type resulted in an out-of-range value
How can I solve it?
Input values:
IdCantiere: 14
Data: 2018-06-21 09:20:04.000
Node.js code:
async function CaricaRisorseCantiere(IdCantiere, Data) {
var value = [];
var query = "select RisorseUmane.IdRisorseUmane,IdUtenteInserimento,u1.Nome+' '+u1.Cognome as InseritoDA,ExtraPreventivo,u2.Nome+' '+u2.Cognome as Risorsa,RisorseUmane.IdUtente,IdCantiere,CONVERT(VARCHAR(10), Data, 105) as Data,Descrizione,convert(varchar(5), OreInizio, 108) as OreInizio,convert(varchar(5), OreFine, 108) as OreFine,REPLACE(Pausa, '.', ':') as Pausa,convert(varchar(5), Cast(convert(varchar(5), (OreFine - OreInizio), 108) as datetime) - CAST(REPLACE(Pausa, '.', ':') as datetime), 108) as TotaleOre from RisorseUmane inner join Utente as u1 on u1.IdUtente = RisorseUmane.IdUtenteInserimento inner join Utente as u2 on u2.IdUtente = RisorseUmane.IdUtente ";
if (Data == "") {
query = query + " where RisorseUmane.IdCantiere= #IdCantiere order by convert(datetime, Data, 103) desc ";
} else {
query = query + " inner join RisorsaRapportoMobile on RisorsaRapportoMobile.IdRisorseUmane=RisorseUmane.IdRisorseUmane where RisorseUmane.IdCantiere= #IdCantiere and RisorsaRapportoMobile.IdRapportoMobile is null and RisorseUmane.Data=convert(varchar,convert(datetime,#Data),105) ";
}
const ret = await new Promise((resolve, reject) => {
new sql.ConnectionPool(DbConfig.config).connect().then(pool => {
if (Data == "") {
return pool.request().input('IdCantiere', sql.Int, IdCantiere).query(query)
} else {
return pool.request().input('IdCantiere', sql.Int, IdCantiere).input('Data', sql.VarChar, Data).query(query)
}
}).then(result => {
resolve(result);
sql.close();
}).catch(err => {
console.log("Errore Risorse Model: ", err)
reject(err);
sql.close();
})
});
for (var i = 0; i < ret.recordset.length; i++) {
value.push({
IdRisorseUmane: ret.recordset[i].IdRisorseUmane,
IdUtenteInserimento: ret.recordset[i].IdUtenteInserimento,
InseritoDA: ret.recordset[i].InseritoDA,
ExtraPreventivo: ret.recordset[i].ExtraPreventivo,
Risorsa: ret.recordset[i].Risorsa,
Data: ret.recordset[i].Data,
Descrizione: ret.recordset[i].Descrizione,
TotaleOre: ret.recordset[i].TotaleOre
})
}
return value;
}
Instead of using the input call:
request.input('Data', sql.VarChar, Data)
The call should be changed to:
request.input('Data', sql.DateTime, new Date(Data));
Also, the SQL clause:
and RisorseUmane.Data=convert(varchar,convert(datetime,@Data),105)
should become:
and RisorseUmane.Data=convert(varchar,@Data,105)
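Putting both changes together, the else branch of the original code becomes something like this sketch (assuming Data arrives as a string such as "2018-06-21 09:20:04.000" that new Date() can parse):
query = query + " inner join RisorsaRapportoMobile on RisorsaRapportoMobile.IdRisorseUmane=RisorseUmane.IdRisorseUmane where RisorseUmane.IdCantiere= @IdCantiere and RisorsaRapportoMobile.IdRapportoMobile is null and RisorseUmane.Data=convert(varchar, @Data, 105) ";
// ...
return pool.request()
    .input('IdCantiere', sql.Int, IdCantiere)
    .input('Data', sql.DateTime, new Date(Data)) // datetime parameter instead of sql.VarChar
    .query(query);
With a real datetime parameter, SQL Server no longer has to guess the format of a varchar value, which is what caused the out-of-range conversion error.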
String.Join is not supported in EF Core, and I want to get a list of strings with a separator, like the SQL function STRING_AGG.
I tried to create a custom SQL Server function mapping, but I get this error:
The parameter 'columnPartArg' for the DbFunction 'QueryHelper.StringAgg(System.Collections.Generic.IEnumerable`1[[System.String, System.Private.CoreLib, Version=5.0.0.0, Culture=neutral, PublicKeyToken=]],System.String)' has an invalid type 'IEnumerable'. Ensure the parameter type can be mapped by the current provider.
This is my function, and the OnModelCreatingAddStringAgg method that registers it in my DbContext:
public static string StringAgg(IEnumerable<string> columnPartArg, [NotParameterized] string separator)
{
throw new NotSupportedException();
}
public static void OnModelCreatingAddStringAgg(ModelBuilder modelBuilder)
{
var StringAggFuction = typeof(QueryHelper).GetRuntimeMethod(nameof(QueryHelper.StringAgg), new[] { typeof(IEnumerable<string>), typeof(string) });
var stringTypeMapping = new StringTypeMapping("NVARCHAR(MAX)");
modelBuilder
.HasDbFunction(StringAggFuction)
.HasTranslation(args => new SqlFunctionExpression("STRING_AGG",
new[]
{
new SqlFragmentExpression((args.ToArray()[0] as SqlConstantExpression).Value.ToString()),
args.ToArray()[1]
}
, nullable: true, argumentsPropagateNullability: new[] { false, false }, StringAggFuction.ReturnType, stringTypeMapping));
}
And this is the code that runs the above function:
_context.PersonnelProjectTimeSheets.GroupBy(c => new { c.Date.Date, c.PersonnelId, c.Personnel.PersonnelCode, c.Personnel.FirstName, c.Personnel.LastName})
.Select(c => new PersonnelProjectTimeOutputViewModel
{
IsConfirmed = c.Min(c => (int)(object)(c.IsConfirmed ?? false)) == 1,
PersonnelDisplay = c.Key.PersonnelCode + " - " + c.Key.FirstName + " " + c.Key.LastName,
PersonnelId = c.Key.PersonnelId,
Date = c.Key.Date,
ProjectName = QueryHelper.StringAgg(c.Select(x=>x.Project.Name), ", "),
TotalWorkTime = 0,
WorkTimeInMinutes = c.Sum(c => c.WorkTimeInMinutes),
});
I also changed my StringAgg method's input to
string columnPartArg
and changed the SqlFunctionExpression in OnModelCreatingAddStringAgg to
new[]
{
new SqlFragmentExpression((args.ToArray()[0] as
SqlConstantExpression).Value.ToString()),
args.ToArray()[1]
}
and changed my query code to
ProjectName = QueryHelper.StringAgg("Project.Name", ", ")
Now when I run my query, SQL Server cannot recognize Project.
I guess the parameter 'columnPartArg' of the DbFunction 'STRING_AGG' is varchar or nvarchar, right?
Most database functions and procedures cannot take a table value as a parameter.
In this case, using EF Core's client evaluation is a good solution. The LINQ looks like this:
_context.PersonnelProjectTimeSheets.GroupBy(c => new { c.Date.Date, c.PersonnelId, c.Personnel.PersonnelCode, c.Personnel.FirstName, c.Personnel.LastName})
.Select(c => new PersonnelProjectTimeOutputViewModel
{
IsConfirmed = c.Min(c => (int)(object)(c.IsConfirmed ?? false)) == 1,
PersonnelDisplay = c.Key.PersonnelCode + " - " + c.Key.FirstName + " " + c.Key.LastName,
PersonnelId = c.Key.PersonnelId,
Date = c.Key.Date,
ProjectName = string.Join(", ",c.Select(x=>x.Project.Name)),//Client evaluation
TotalWorkTime = 0,
WorkTimeInMinutes = c.Sum(c => c.WorkTimeInMinutes),
});
I have a stored procedure that I just converted from PL/SQL to Snowflake JavaScript. It inserts about 100 records a minute; the total record count is about 700. Because it is so difficult to know where a problem is in Snowflake, I insert log statements as the overall functionality progresses, and I also push messages to an array that gets returned at the end. I do the same insert-into-a-log-table logging in PL/SQL and it barely makes a performance difference there. I'll admit that my progress logging slows down the process, but I doubt it is the primary contributor.
The script makes a table that, given a date, shows the fiscal quarter that it corresponds to. This is helpful for other queries not shown. I have a simple loop that goes from the beginning of the first quarter to the end of the last and puts the corresponding quarter in the lookup table.
It took 9 minutes to run as written, but in Oracle it takes less than a second.
I'd like to know how to make this run faster:
create or replace procedure periodic_load()
RETURNS varchar
LANGUAGE javascript
execute as owner
as
$$
var result = "";
var messages = new Array();
try {
/**
Constants shared between functions
*/
var SINGLE_QUOTE_CHAR="'";
var DOUBLE_QUOTE_CHAR="\"";
var COMMA_CHAR=",";
var LEFT_PARENTHESIS="(";
var RIGHT_PARENTHESIS=")";
var ESCAPED_SINGLE_QUOTE_CHAR="\\'";
var ESCAPED_DOUBLE_QUOTE_CHAR="\\\"";
var CONSOLE_LOG_USED = true;
var IS_SNOWFLAKE = false;
/*
Execute Snowflake SQL or simulate the execution thereof
@param sqlTextIn, binds...
sqlTextIn: String of the sql command to run.
binds: zero or more parameters to bind to the execution of the command.
*/
function execute_with_log() {
var result = null;
messages.push('###'+"execute_with_log()");
messages.push('###'+"EXECUTE_WITH_LOG(BP1)");
var argumentsArray = Array.prototype.slice.apply(arguments);
var sqlTextIn = argumentsArray[0];
messages.push('###'+'EXECUTE_WITH_LOG argument count: '+arguments.length);
if(!IS_SNOWFLAKE) {
messages.push('###'+ "EXECUTE_WITH_LOG(BP2)");
console.log('SKIPPING SNOWFLAKE SQL: '+sqlTextIn);
} else {
messages.push('###'+ " EXECUTE_WITH_LOG(BP3)");
var statementResult;
var logMessage = sqlTextIn;
if(argumentsArray.length==1) {
messages.push('###'+ " EXECUTE_WITH_LOG(BP4)");
messages.push('###'+" ** NO BIND PARAMETERS DETECTED **");
} else {
messages.push('###'+ " EXECUTE_WITH_LOG(BP5)");
for(var bindParmCounter = 1; bindParmCounter < argumentsArray.length; bindParmCounter++) {
messages.push('###'+" ,"+argumentsArray[bindParmCounter]);
}
}
messages.push('###'+ " EXECUTE_WITH_LOG(BP6)");
log_message('I',logMessage);
if(argumentsArray.length===1) {
messages.push('###'+ " EXECUTE_WITH_LOG(BP7)");
statement = snowflake.createStatement( { sqlText: sqlTextIn });
} else {
messages.push('###'+ " EXECUTE_WITH_LOG(BP8)");
var bindsIn = argumentsArray.slice(1,argumentsArray.length);
for(var bindParmCounter = 0; bindParmCounter < bindsIn.length; bindParmCounter++) {
messages.push('###bindsIn['+bindParmCounter+"]="+bindsIn[bindParmCounter]);
messages.push('###bindsIn['+bindParmCounter+"] type ="+bindsIn[bindParmCounter].getName());
}
statement = snowflake.createStatement(
{
sqlText: sqlTextIn,
binds: bindsIn
}
);
}
messages.push('###'+ " EXECUTE_WITH_LOG(BP9) sqlTextIn="+sqlTextIn);
result = statement.execute();
messages.push('###'+ " After execute BP10 =");
commit();
messages.push('###'+ " After commit BP11 =");
}
return result;
}
function commit() {
messages.push('###'+ " commit");
statement = snowflake.createStatement(
{
sqlText: 'commit'
}
);
statement.execute();
return messages;
}
function log_message(severity,message) {
messages.push('###'+"log_message(severity,message): severity="+severity+" message="+message);
var result = null;
if(!IS_SNOWFLAKE) {
console.log(severity+": "+message);
messages.push('###'+severity+": "+message);
} else {
var record = {'severity': severity,'date_time': {value: 'current_timestamp::timestamp_ntz',useQuote:false},message:message};
try {
var escapeStep1=message.replaceAll(SINGLE_QUOTE_CHAR,ESCAPED_SINGLE_QUOTE_CHAR);
var escapeStep2=escapeStep1.replaceAll(DOUBLE_QUOTE_CHAR,ESCAPED_DOUBLE_QUOTE_CHAR);
quotedValue=SINGLE_QUOTE_CHAR+escapeStep2+SINGLE_QUOTE_CHAR;
var quotedSeverity = SINGLE_QUOTE_CHAR+severity+SINGLE_QUOTE_CHAR;
var sql_command = "insert into LOG_MESSAGES(severity,date_time,message) values("+quotedSeverity+",current_timestamp::timestamp_ntz,"+quotedValue+")";
statement = snowflake.createStatement( { sqlText: sql_command});
var sql_command = "commit";
statement = snowflake.createStatement( { sqlText: sql_command});
} catch(error) {
messages.push('###'+'FAILURE: '+error);
}
}
return result;
}
function truncate_table(tableName) {
messages.push('###'+"(truncate_table()");
var result = execute_with_log("truncate table "+tableName);
messages.push('###'+'I','End truncate_table()');
return result;
}
function fql() {
messages.push('###'+"begin fql()");
log_message('I','Begin fql()');
var table_name='fiscal_quarter_list';
truncate_table(table_name);
execute(
"insert into fiscal_quarter_list (fiscal_quarter_id,fiscal_quarter_name,fiscal_year,start_date,end_date,last_mod_date_stamp) ("
+" select fiscal_quarter_id,fiscal_quarter_name,fiscal_year,min(start_date) start_date,max(end_date) end_date,current_date from cdw_fiscal_periods cfp"
+" where (cfp.start_date >= add_months(sysdate(),-24) and sysdate() >= cfp.end_date ) or "
+" (cfp.start_date <= sysdate() and sysdate() < cfp.end_date) "
+" group by fiscal_quarter_id,fiscal_quarter_name,fiscal_year "
+" order by fiscal_quarter_id desc "
+" fetch first 8 rows only "
+")"
);
log_message('I','End fql()');
}
/*
Function to increment a Date object by one standard day
Sourced from https://stackoverflow.com/questions/563406/add-days-to-javascript-date
*/
function addDaysInJs(dateIn, days) {
var result = new Date(dateIn);
result.setDate(result.getDate() + days);
return result;
}
function dtfq() {
messages.push('###'+"dtfq()");
tableName = 'date_to_fiscal_quarter';
var firstDate;
var runningDate;
log_message('I','Begin dtfq');
truncate_table(tableName);
var result = null;
var resultSet = execute_with_log(" SELECT FISCAL_QUARTER_ID, FISCAL_QUARTER_NAME,try_to_date(START_DATE) as START_DATE, try_to_date(END_DATE) as END_DATE"
+ " FROM FISCAL_QUARTER_LIST "
+ " ORDER BY START_DATE ");
log_message('D','resultSet ='+resultSet);
log_message('D','resultSet typeof='+typeof resultSet);
while(resultSet.next()) {
messages.push('###'+"bp1 dtfq() loop start_date="+resultSet.getColumnValue("START_DATE")+" end_date="+resultSet.getColumnValue("END_DATE"));
firstDate = resultSet.getColumnValue("START_DATE");
lastDate = resultSet.getColumnValue("END_DATE");
runningDate=new Date(firstDate);
lastDate = new Date(lastDate);
log_message('D','Start date='+firstDate);
while (runningDate <= lastDate) {
var fiscalQuarterId=resultSet.getColumnValue("FISCAL_QUARTER_ID")
var fiscalQuarterName=resultSet.getColumnValue("FISCAL_QUARTER_NAME")
messages.push('###'+"bp2 dtfq() runningDate="+runningDate+' fiscalQuarterId='+fiscalQuarterId+' fiscalQuarterName='+fiscalQuarterName);
log_message('D','Fiscal quarter id='+fiscalQuarterId);
/*
execute_with_log(" insert into sc_hub_date_to_fiscal_quarter(date_stamp,) "
+" values(try_to_date(?)) "
,runningDate.toISOString());
*/
execute_with_log(" insert into sc_hub_date_to_fiscal_quarter(date_stamp,fiscal_quarter_id,fiscal_quarter_name) "
+" values(?,?,?)"
,runningDate.toISOString()
,fiscalQuarterId
,fiscalQuarterName);
runningDate = addDaysInJs(runningDate, 1);
}
}
log_message('I','End dtfq Success');
return result;
}
/*
Execute Snowflake SQL or simulate the execution thereof
@param sqlTextIn, binds...
sqlTextIn: String of the sql command to run.
binds: zero or more parameters to bind to the execution of the command.
*/
function execute() {
messages.push('###'+"execute():");
var result = null;
var argumentsArray = Array.prototype.slice.apply(arguments);
var sqlTextIn = argumentsArray[0];
if(!IS_SNOWFLAKE) {
console.log('SKIPPING SNOWFLAKE SQL: '+sqlTextIn);
messages.push('###'+'SKIPPING SNOWFLAKE SQL: '+sqlTextIn);
} else {
messages.push('###'+'USING SNOWFLAKE SQL: '+sqlTextIn);
var statementResult;
if(argumentsArray.length>2) {
messages.push('###'+'Has bind arguments: ');
var bindsIn = argumentsArray.slice(2,argumentsArray.length);
statement = snowflake.createStatement(
{
sqlText: sqlTextIn,
binds: bindsIn
}
);
} else {
messages.push('###'+'Has no bind arguments: ');
messages.push('###'+'###sqlText='+sqlTextIn+'###');
statement = snowflake.createStatement( { sqlText: sqlTextIn });
}
result = statement.execute();
messages.push('###'+'statement.execute succeeded');
log_message('I',sqlTextIn);
}
return result;
}
String.prototype.replaceAll = function(target, replacement) {
return this.split(target).join(replacement);
};
Object.prototype.getName = function() {
var funcNameRegex = /function (.{1,})\(/;
var results = (funcNameRegex).exec((this).constructor.toString());
return (results && results.length > 1) ? results[1] : "";
};
dtfq();
} catch(error) {
messages.push('###'+error);
} finally {
result = messages.join("\n");
}
return result;
$$
;
call periodic_load()
The use-case isn't entirely stated here, but it appears that your stored procedure merely generates (explodes) and inserts a series of dates into a table, for each date range encountered in a source table input row.
This can be achieved with SQL (with recursive CTEs) directly, which would run far more efficiently than a linear stored procedure iteration:
create table destination_table (fiscal_quarter_id integer, fiscal_quarter_name string, date_stamp date);
insert into destination_table
with source_table(fiscal_quarter_id, fiscal_quarter_name, start_date, end_date) as (
select 1, 'Q1', '2020-01-01'::date, '2020-03-31'::date union all
select 2, 'Q2', '2020-04-01'::date, '2020-06-30'::date union all
select 3, 'Q3', '2020-07-01'::date, '2020-09-30'::date union all
select 4, 'Q4', '2020-10-01'::date, '2020-12-31'::date
), recursive_expand as (
select
fiscal_quarter_id, fiscal_quarter_name, start_date, end_date,
start_date as date_stamp
from source_table
union all
select
fiscal_quarter_id, fiscal_quarter_name, start_date, end_date,
dateadd(day, 1, date_stamp)::date date_stamp
from recursive_expand
where date_stamp < end_date
)
select fiscal_quarter_id, fiscal_quarter_name, date_stamp
from recursive_expand
order by date_stamp asc;
The example inserts 366 rows into the destination_table (2020 being a leap year) covering dates of all four quarters.
@Greg Pavlik's comment covers why the stored procedure is slow: each statement is independently submitted, compiled, planned, executed, and returned from the Snowflake query processing service, which adds a lot of overhead. If you'd still like to proceed with the stored procedure API for your use case, one idea is to make two specific changes:
Store all generated data rows into an array instead of inserting them directly, like so (this is only practical for a few hundred rows, not beyond, due to memory constraints):
function dtfq() {
var all_rows = [];
// … iteration and other logic here …
all_rows.push([fiscalQuarterId, fiscalQuarterName, runningDate]);
// … iteration and other logic ends here (minus inserts) …
return all_rows;
}
Insert the n generated rows with a single generated INSERT statement containing n value groups. An example of such code can be seen in this answer.
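A rough sketch of that second change, assuming the array layout from the dtfq() sketch above and the standard snowflake.createStatement / binds API (the insertAllRows name is made up, and it assumes the date_stamp column accepts a 'YYYY-MM-DD' string):
function insertAllRows(allRows) {
    // Build one INSERT with a (?, ?, ?) group per generated row and bind all values at once.
    if (allRows.length === 0) {
        return;
    }
    var valueGroups = [];
    var binds = [];
    for (var i = 0; i < allRows.length; i++) {
        valueGroups.push("(?, ?, ?)");
        binds.push(allRows[i][2].toISOString().slice(0, 10)); // date_stamp as 'YYYY-MM-DD'
        binds.push(allRows[i][0]);                            // fiscal_quarter_id
        binds.push(allRows[i][1]);                            // fiscal_quarter_name
    }
    var sqlText = "insert into sc_hub_date_to_fiscal_quarter"
        + " (date_stamp, fiscal_quarter_id, fiscal_quarter_name) values "
        + valueGroups.join(",");
    snowflake.createStatement({ sqlText: sqlText, binds: binds }).execute();
}
This turns the roughly 700 single-row statements into one round trip to the query processing service.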
The legacy system stored passwords in the output format of this query,
SELECT
HASHBYTES('SHA1', CONVERT(VARCHAR, HASHBYTES('SHA1', CONVERT(NVARCHAR(4000), 'test'))) + 'mysalt')
where the password is test and mysalt is the salt used.
The result is something like
0x169A0EF01AA369518D6810E14872A3A003A1F0AA
I have to take that encrypted password and write a Node function that produces the same result as the above query.
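My current idea is a crypto-based sketch along these lines (assuming CONVERT(NVARCHAR(4000), ...) means the inner hash sees the UTF-16LE bytes of the password, that CONVERT(VARCHAR, <varbinary>) is a direct byte copy of the digest, and that the salt is plain ASCII):
const crypto = require('crypto');

function legacySqlHash(password, salt) {
    // Inner hash: SHA1 over the UTF-16LE bytes, like HASHBYTES('SHA1', CONVERT(NVARCHAR(4000), password))
    const inner = crypto.createHash('sha1')
        .update(Buffer.from(password, 'utf16le'))
        .digest();
    // Outer hash: SHA1 over the raw inner digest bytes followed by the salt bytes,
    // mirroring CONVERT(VARCHAR, <inner hash>) + 'mysalt'
    const outer = crypto.createHash('sha1')
        .update(Buffer.concat([inner, Buffer.from(salt, 'latin1')]))
        .digest('hex');
    return '0x' + outer.toUpperCase();
}

console.log(legacySqlHash('test', 'mysalt'));
Is that the right way to reproduce the T-SQL result?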
Node.js is not going to replace a T-SQL query. You would still use T-SQL to query your database, and something like the tedious module to connect to it. This is an example from https://msdn.microsoft.com/library/mt715784.aspx of how to connect from Node.js to SQL Server and execute a query. Some modifications to the executeStatement function would get you going.
var Connection = require('tedious').Connection;
var config = {
userName: 'yourusername',
password: 'yourpassword',
server: 'yourserver.database.windows.net',
// When you connect to Azure SQL Database, you need these next options.
options: {encrypt: true, database: 'AdventureWorks'}
};
var connection = new Connection(config);
connection.on('connect', function(err) {
// If no error, then good to proceed.
console.log("Connected");
executeStatement();
});
var Request = require('tedious').Request;
var TYPES = require('tedious').TYPES;
function executeStatement() {
request = new Request("SELECT c.CustomerID, c.CompanyName,COUNT(soh.SalesOrderID) AS OrderCount FROM SalesLT.Customer AS c LEFT OUTER JOIN SalesLT.SalesOrderHeader AS soh ON c.CustomerID = soh.CustomerID GROUP BY c.CustomerID, c.CompanyName ORDER BY OrderCount DESC;", function(err) {
if (err) {
console.log(err);}
});
var result = "";
request.on('row', function(columns) {
columns.forEach(function(column) {
if (column.value === null) {
console.log('NULL');
} else {
result+= column.value + " ";
}
});
console.log(result);
result ="";
});
request.on('done', function(rowCount, more) {
console.log(rowCount + ' rows returned');
});
connection.execSql(request);
}
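One way to modify the executeStatement function for the hashing use case might be this sketch (the hashPassword name is made up; it reuses the query from the question, parameterized, so the hashing stays in T-SQL):
function hashPassword(password, salt) {
    var request = new Request(
        "SELECT HASHBYTES('SHA1', CONVERT(VARCHAR, HASHBYTES('SHA1', CONVERT(NVARCHAR(4000), @password))) + @salt) AS hashed;",
        function(err) {
            if (err) {
                console.log(err);
            }
        });
    request.addParameter('password', TYPES.NVarChar, password);
    request.addParameter('salt', TYPES.VarChar, salt);
    request.on('row', function(columns) {
        // columns[0].value is a Buffer holding the 20-byte SHA1 digest
        console.log('0x' + columns[0].value.toString('hex').toUpperCase());
    });
    connection.execSql(request);
}
That keeps the hashing in T-SQL, so the result matches what the legacy system stored, while Node only sends the parameters and reads back the binary digest.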
I am trying to push key/value pairs into an array and then convert it to JSON using JSON.stringify(), but it is not working.
My node.js code:
var jarray=[];
var json1="";
for (var i=0; i<jsonObj["Masters"]['Customer'].length; i++){
var name= jsonObj["Masters"]['Customer'][i];
var cust_name=name['Customer_Name'];
var cust_code=name['Customer_Code'];
connection.query("SELECT code FROM ((SELECT ccode AS code FROM customermaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT scode AS code FROM suppliermaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT stcode AS code FROM stockmaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT gcode AS code FROM generalledger2 WHERE companyid='AXWPM1658D') UNION ALL (SELECT bcode AS code FROM bankmaster WHERE companyid='AXWPM1658D'))p where code='"+cust_code+"' ", function(err, rows, fields) {
if (!err){
var item ={"customer_name":cust_name ,"customer_code": cust_code };
jarray.push(item);
}
else{
console.log('Error while performing Query.'+err);
}
});
}
json1=JSON.stringify({jarray:jarray});
var jsonObj1 = JSON.parse(json1);
console.log("Json:"+jsonObj1);
console.log("arr length:"+jsonObj1.jarray.length);
It prints:
Json:{ jarray: [] }
arr length:0
My question is: how do I push values into the array and then convert it into a JSON array?
What do you use to make SQL requests? It looks like it has asynchronous behaviour. If so, that can be the reason why your array is still empty when you stringify it. By the way, making SQL requests in a loop is not very efficient. Perhaps it is better to fetch all results with one SQL request; this will also make it easier to stringify in the callback.
Use this:
var jarray = [];
var json1 = "";
var async = require('async');
async.forEachLimit(jsonObj["Masters"]['Customer'], 1, function(customer, callback) {
var name = customer;
var cust_name = name['Customer_Name'];
var cust_code = name['Customer_Code'];
connection.query("SELECT code FROM ((SELECT ccode AS code FROM customermaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT scode AS code FROM suppliermaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT stcode AS code FROM stockmaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT gcode AS code FROM generalledger2 WHERE companyid='AXWPM1658D') UNION ALL (SELECT bcode AS code FROM bankmaster WHERE companyid='AXWPM1658D'))p where code='" + cust_code + "' ", function(err, rows, fields) {
if (!err) {
var item = {
"customer_name": cust_name,
"customer_code": cust_code
};
jarray.push(item);
callback();
} else {
callback(err);
console.log('Error while performing Query.' + err);
}
});
}, function(err) {
if (err) {
console.log(err);
} else {
json1 = JSON.stringify({
jarray: jarray
});
var jsonObj1 = JSON.parse(json1);
console.log("Json:" + jsonObj1);
console.log("arr length:" + jsonObj1.jarray.length);
}
})
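Following the suggestion above to fetch everything with one request, a rough sketch (reusing the same connection.query call and UNION ALL subquery, and assuming the customer list is small enough for a single IN clause) could be:
var customers = jsonObj["Masters"]['Customer'];
var codes = customers.map(function(c) { return c['Customer_Code']; });
var placeholders = codes.map(function() { return '?'; }).join(',');

var sqlText = "SELECT code FROM ((SELECT ccode AS code FROM customermaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT scode AS code FROM suppliermaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT stcode AS code FROM stockmaster WHERE companyid='AXWPM1658D') UNION ALL (SELECT gcode AS code FROM generalledger2 WHERE companyid='AXWPM1658D') UNION ALL (SELECT bcode AS code FROM bankmaster WHERE companyid='AXWPM1658D'))p WHERE code IN (" + placeholders + ")";

connection.query(sqlText, codes, function(err, rows) {
    if (err) {
        return console.log('Error while performing Query.' + err);
    }
    // Keep only the customers whose code was found, then stringify once in the callback.
    var found = {};
    rows.forEach(function(row) { found[row.code] = true; });
    var jarray = customers
        .filter(function(c) { return found[c['Customer_Code']]; })
        .map(function(c) {
            return { customer_name: c['Customer_Name'], customer_code: c['Customer_Code'] };
        });
    var json1 = JSON.stringify({ jarray: jarray });
    console.log("Json:" + json1);
    console.log("arr length:" + jarray.length);
});
This runs a single round trip instead of one query per customer, and the array is guaranteed to be populated before JSON.stringify() is called.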
I have a social app for which I am trying to create a friend activity feed using Azure SQL Server.
I have 3 tables I want to select from:
Songs
-createdAt
-id
-userId
-trackName
-etc
Comments
-createdAt
-id
-userId
-songId
-text
Likes
-createdAt
-id
-userId
-songId
I have the users that the current user is following stored in an array named 'follows'.
How do I go about selecting the 40 most recent items from those 3 tables where userId in each table is in the follows array?
Edit:
function getActivities(userId) {
var deferred = Q.defer();
var follows = [];
getFollowing(userId).then(function (results) {
follows.push(userId);
_.each(results, function (user) {
follows.push(user.toUserId);
});
return;
}).then(function () {
var stringified = "'" + follows.join("','") + "'";
var queryString = "SELECT * FROM comments, songs, likes WHERE comments.userId IN (" + stringified + ") OR songs.userId IN (" + stringified +") OR likes.userId IN (" + stringified + ")";
var params = [];
return sqlQuery(queryString, params);
}).then(function (results) {
console.log('Activities: ', results);
deferred.resolve(results);
}, function (error) {
console.log('Error: ', error.message);
deferred.reject(error.message);
});
return deferred.promise;
}
Alright, so I dug into JOINS a little more and realized how easy it actually is once you wrap your head around it. Here is what I did to complete this:
var queryString = "SELECT TOP 50 follows.id AS followId, follows.toUserId AS followToUserId, follows.fromUserId AS followFromUserId, comments.text AS commentText, profiles.userId, profiles.username, profiles.name, profiles.profileImage, songs.trackId, songs.trackName, songs.artistName, songs.collectionName, songs.artworkUrl100, songs.caption, songs.id AS songId, activities.id AS activityId, activities.type AS activityType, activities.objectId AS activityObjectId, activities.parentType AS activityParentType, activities.parentId AS activityParentId, activities.__createdAt AS activityCreatedAt FROM activities ";
queryString += "INNER JOIN profiles ON (profiles.userId = activities.userId) ";
queryString += "LEFT JOIN songs ON (songs.id = activities.objectId AND activities.type = 'songs') OR (songs.id = activities.parentId AND activities.parentType = 'songs') ";
queryString += "LEFT JOIN comments ON (activities.type = 'comments' AND comments.id = activities.objectId) ";
queryString += "LEFT JOIN follows ON (activities.type = 'followed' AND activities.userid = follows.fromUserId) ";
queryString += "WHERE activities.userId IN (SELECT follows.toUserId AS userId FROM follows WHERE follows.fromUserId = ? AND follows.isFollowed = 'true') ";
queryString += "ORDER BY activities.__createdAt DESC";
var params = [userId];
mssql.query(queryString, params, {
success: function (results) {
_.each(results, function (result) {
//Remove columns with null or undefined values
for (var i in result) {
if (result[i] === null || result[i] === undefined) {
delete result[i];
}
}
});
response.send(200, results);
},
error: function (error) {
response.send(400, error.message);
}
});
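For completeness, staying with just the original Songs/Comments/Likes tables from the question (no activities table), a UNION ALL sketch in the same style might look like this (assuming the same mssql.query API and the columns listed in the question; it reuses the stringified follows list from the Edit above, though a parameterized IN list would be safer than string concatenation):
var stringified = "'" + follows.join("','") + "'";
var queryString = "SELECT TOP 40 * FROM ("
    + " SELECT 'song' AS itemType, id, userId, createdAt FROM songs WHERE userId IN (" + stringified + ")"
    + " UNION ALL SELECT 'comment' AS itemType, id, userId, createdAt FROM comments WHERE userId IN (" + stringified + ")"
    + " UNION ALL SELECT 'like' AS itemType, id, userId, createdAt FROM likes WHERE userId IN (" + stringified + ")"
    + ") AS recentActivity ORDER BY createdAt DESC";

mssql.query(queryString, [], {
    success: function(results) {
        response.send(200, results);
    },
    error: function(error) {
        response.send(400, error.message);
    }
});
Each row carries an itemType tag, so the client can render songs, comments and likes differently while keeping a single, date-ordered feed of the 40 most recent items.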