CoffeeScript: looping through an array and adding values

What I'd like to do is add an array of students to each manager (in an array).
This is where I'm getting stuck:
for sup in sups
  do (sup) ->
    sup.students_a = "This one works"
    getStudents sup.CLKEY, (studs) ->
      sup.students_b = "This one doesn't"
cback sups
EDIT: After some thought, what may be happening is that the "students_b" data is being added to the sups array, but the sups array is being returned (via the cback function) before that work completes. So I suppose I should move that work into a function and only return sups after another callback fires?
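Something like a completion counter is what I have in mind (a rough sketch of the idea, untested):

pending = sups.length
for sup in sups
  do (sup) ->
    getStudents sup.CLKEY, (studs) ->
      sup.students_b = studs
      pending -= 1
      cback sups if pending is 0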
For context, here's the gist of this code:
odbc = require "odbc"
module.exports.run = (managerId, cback) ->
  db2 = new odbc.Database()
  conn = "dsn=mydsn;uid=myuid;pwd=mypwd;database=mydb"
  db2.open conn, (err) ->
    throw err if err
    sortBy = (key, a, b, r) ->
      r = if r then 1 else -1
      return -1*r if a[key] > b[key]
      return +1*r if b[key] > a[key]
      return 0
    getDB2Rows = (sql, params, cb) ->
      db2.query sql, params, (err, rows, def) ->
        if err? then console.log err else cb rows
    getManagers = (mid, callback) ->
      supers = []
      queue = []
      querySupers = (id, cb) ->
        sql = "select distinct mycolumns where users.id = ? and users.issupervisor = 1"
        getDB2Rows sql, [id], (rows) ->
          for row in rows
            do (row) ->
              if supers.indexOf(row) is -1 then supers.push row
              if queue.indexOf(row) is -1 then queue.push row
          cb null
      addSupers = (id) -> # todo: add limit to protect against infinite loop
        querySupers id, (done) ->
          shiftrow = queue.shift()
          if shiftrow? and shiftrow['CLKEY']?
            addSupers shiftrow['CLKEY']
          else
            callback supers
      addMain = (id) ->
        sql = "select mycolumns where users.id = ? and users.issupervisor = 1"
        getDB2Rows sql, [id], (rows) ->
          supers.push row for row in rows
      addMain mid
      addSupers mid
    getStudents = (sid, callb) ->
      students = []
      sql = "select mycols from mytables where myconditions and users.supervisor = ?"
      getDB2Rows sql, [sid], (datas) ->
        students.push data for data in datas
        callb students
    console.log "Compiling Array of all Managers tied to ID #{managerId}..."
    getManagers managerId, (sups) ->
      console.log "Built array of #{sups.length} managers"
      sups.sort (a,b) ->
        sortBy('MLNAME', a, b) or # manager's manager
        sortBy('LNAME', a, b)     # manager
      for sup in sups
        do (sup) ->
          sup.students_a = "This one works"
          getStudents sup.CLKEY, (studs) ->
            sup.students_b = "This one doesn't"
      cback sups

You are correct: your callback cback sups is executed before even the first getStudents has executed its callback with the studs array. Since you want to do this for a whole array, it can grow a little hairy with just a for loop.
I always recommend async for these things:
async = require "async"

getter = (sup, callback) ->
  getStudents sup.CLKEY, callback
async.map sups, getter, (err, results) ->
  # results is an array of results for each sup
  callback() # <-- this is where you do your final callback.
Edit: Or if you want to put students on each sup, you would have this getter:
getter = (sup, callback) ->
  getStudents sup.CLKEY, (studs) ->
    sup.students = studs
    # async expects err as the first parameter to callbacks, as is customary in node
    callback null, sup
Edit: Also, you should probably follow the Node convention of passing err as the first argument to all callbacks, and do proper error checking.
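To make that concrete, here is a minimal err-first sketch (my adaptation; the original getStudents and cback do not take an err argument):

getStudents = (sid, callb) ->
  sql = "select mycols from mytables where myconditions and users.supervisor = ?"
  db2.query sql, [sid], (err, rows) ->
    return callb err if err? # propagate the error instead of just logging it
    callb null, rows         # err-first: null signals success

getter = (sup, callback) ->
  getStudents sup.CLKEY, (err, studs) ->
    return callback err if err?
    sup.students = studs
    callback null, sup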

Related

How to implement stream with skip and conditional stop

I'm trying to implement batch processing. My algorithm:
1) First, request items from the db, with initial skip = 0. If there are no items, stop processing completely.
case class Item(i: Int)
def getItems(skip: Int): Future[Seq[Item]] = {
  Future((skip until (skip + (if (skip < 756) 100 else 0))).map(Item))
}
2) Then, for every item, do a heavy job (parallelism = 4):
def heavyJob(item: Item): Future[String] = Future {
  Thread.sleep(1000)
  item.i.toString + " done"
}
3) After all items are processed, go back to step 1 with skip += 100.
What I'm trying:
val dbSource: Source[List[Item], _] = Source.fromFuture(getItems(0).map(_.toList))
val flattened: Source[Item, _] = dbSource.mapConcat(identity)
val procced: Source[String, _] = flattened.mapAsync(4)(item => heavyJob(item))
procced.runWith(Sink.onComplete(t => println("Complete: " + t.isSuccess)))
But I don't know how to implement the pagination.
The skip incrementing can be handled with an Iterator as the underlying source of values:
val skipIncrement = 100
val skipIterator : () => Iterator[Int] =
  () => Iterator from (0, skipIncrement)
This Iterator can then be used to drive an akka Source which gets the items and will continue processing until a query returns an empty Seq:
val databaseStillHasValues : Seq[Item] => Boolean =
  (dbValues) => !dbValues.isEmpty

val itemSource : Source[Item, _] =
  Source.fromIterator(skipIterator)
        .mapAsync(1)(getItems)
        .takeWhile(databaseStillHasValues)
        .mapConcat(_.toList)
The heavyJob can be used within a Flow:
val heavyParallelism = 4
val heavyFlow : Flow[Item, String, _] =
  Flow[Item].mapAsync(heavyParallelism)(heavyJob)
Finally, the Source and Flow can be attached to the Sink:
val printSink = Sink.foreach[String](str => println(s"Complete: $str"))

itemSource.via(heavyFlow)
          .runWith(printSink)
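Note that materializing the stream with runWith needs an actor system and materializer in implicit scope. A minimal sketch of that boilerplate (assuming the pre-Akka-2.6 APIs used above):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer

implicit val system = ActorSystem("batchProcessing")
implicit val materializer = ActorMaterializer()
import system.dispatcher // ExecutionContext for getItems and heavyJob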

How to write "in" query for Erlang mnesia?

I have a mnesia table, let's say employee. I need to find all employee records whose name is in EmployeeNameList = ["Erlich", "Richard", "Gilfoyle", "Dinesh"]. Is there a way to do this using mnesia:select or some other function?
Following the Mnesia documentation, it can be done as follows:
get_employees_by_name(NameList) ->
    MatchHead = #employee{name = '$1', _ = '_'},
    Result = '$_',
    MatchSpec = [{MatchHead, [{'=:=', '$1', Name}], [Result]} || Name <- NameList],
    F = fun() ->
            mnesia:select(employee, MatchSpec)
        end,
    {atomic, Employees} = mnesia:transaction(F),
    Employees.
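For example, with the list from the question (assuming the employee table and record already exist):

EmployeeNameList = ["Erlich", "Richard", "Gilfoyle", "Dinesh"],
Employees = get_employees_by_name(EmployeeNameList).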

Execute Microsoft SQL query on R Shiny

I am writing an R Shiny app. Can someone tell me how to execute a Microsoft SQL query in R Shiny?
This is what I have done so far:
data <- reactive({
  conn <- reactive({ databaseOpen(serverName="[serverName]", databaseName=[dbName]) })
  qr <- reactive({ SELECT * from myTable })
  res <- reactive({ databaseQuery(conn = conn, query = qr) })
  close(conn)
  View(res)
})
Any help is appreciated!
I was able to call a query by creating a function outside of the server and ui functions (in other words, in global.r). The server function could then call that query function with one of the inputs as its argument.
Here is my code:
library(RODBC)

queryfunction <- function(zipper){
  odbcChannel <- odbcConnect("myconnection")
  querydoc <- paste0("
    SELECT distinct *
    FROM mydb
    where substring(nppes_provider_zip,1,2) = '43'
      and [provider_type] = 'General Practice'
  ")
  pricetable <- sqlQuery(odbcChannel, querydoc)
  close(odbcChannel)
  pricetable[which(substring(pricetable$nppes_provider_zip,1,5)==zipper),]
}
server <- shinyServer(function(input, output) {
  output$mytable1 <- renderDataTable(data.table(queryfunction(input$zip)))
})
I figured it out. It can be done as:
server.r
serverfun <- function(input, output){
  # Storing values in myData variable
  myData <- reactive({
    # Opening database connection
    conn <- databaseOpen(serverName = "myServer", databaseName = "myDB")
    # Sample query which uses some input
    qr <- paste("SELECT name FROM Genes g WHERE Id = ", input$myId, " ORDER BY name")
    # Storing results
    res <- databaseQuery(conn = conn, query = qr)
    # Closing database
    databaseClose(conn)
    # Returning results
    res
  })
  output$tbTable <- renderTable({
    # Checking if myData is not null
    if(is.null(myData())){ return() }
    # Return myData
    myData()
  })
}
ui.r
library("shiny")
shinyUI(
  pageWithSidebar(
    headerPanel("Hide Side Bar example"),
    sidebarPanel(
      textInput("myId", "Enter ID below", "1234")
    ),
    mainPanel(
      tabsetPanel(
        tabPanel("Data", tableOutput("tbTable"))
      )
    )
  )
)
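For completeness, a minimal sketch of launching this pair (assuming ui.r and server.r sit together in an app directory, and that server.r registers the function, e.g. by ending with shinyServer(serverfun)):

library(shiny)
runApp("myapp") # "myapp" is a hypothetical directory containing ui.r and server.r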

How to make an ndb.AND query condition smarter

I'm trying to build a query for tag search.
tags: the number of tags, e.g. 3
q: an array of tags, e.g. ['foo','hoo','poo']
def queryByTags(cls, tags, q):
    def one():
        qry = models.Card.query(models.Card.tags_value == q[0])
        return qry
    def two():
        qry = models.Card.query(ndb.AND(models.Card.tags_value == q[0],
                                        models.Card.tags_value == q[1]))
        return qry
    def three():
        qry = models.Card.query(ndb.AND(models.Card.tags_value == q[0],
                                        models.Card.tags_value == q[1],
                                        models.Card.tags_value == q[2]))
        return qry
    tags_len = {1: one,
                2: two,
                3: three,
                }
    return tags_len[tags]()
This method can handle up to 3 tags. I could copy the code and extend it to 7, 8, 9... but that is a very sad way to do it. Is there a smarter way?
In pseudo python-ndb (I didn't run my code, but you'll get the idea), one way would be:
cards_count = Card.query().filter(Card.tags_value == q[0])\
                          .filter(Card.tags_value == q[1])\
                          .filter(Card.tags_value == q[2]).count()
or, if iterating over a dynamic array (unknown length):
qry = Card.query()
for value in q:
    qry = qry.filter(Card.tags_value == value)
cards_count = qry.count()
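Alternatively, staying with the ndb.AND style of the question, the one/two/three functions collapse into a single dynamic version (a sketch against the same models.Card; I haven't run this either):

def queryByTags(cls, tags, q):
    # One equality condition per tag; ndb.AND takes them as varargs.
    conditions = [models.Card.tags_value == tag for tag in q[:tags]]
    return models.Card.query(ndb.AND(*conditions))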

What's the best way to prevent adding a record whose primary key is already present in mnesia?

Suppose I've got a simple record definition:
-record(data, {primary_key = '_', more_stuff = '_'}).
I want a simple function that adds one of these records to a mnesia database. But I want it to fail if there's already an entry with the same primary key.
(In the following examples, assume I've already defined
db_get_data(Key) ->
    Q = qlc:q([Datum || Datum = #data{primary_key = RecordKey} <- mnesia:table(data),
                        RecordKey =:= Key]),
    qlc:e(Q).
)
The following works, but strikes me as sort of ugly ...
add_data(D) when is_record(D, data) ->
    {atomic, Result} = mnesia:transaction(fun() ->
        case db_get_data(D#data.primary_key) of
            [] -> db_add_data(D);
            _  -> {error, bzzt_duplicate_primary_key}
        end
    end),
    case Result of
        {error, _} = Error -> throw(Error);
        _ -> Result
    end.
This works too, but is also ugly:
add_data(D) when is_record(D, data) ->
    {atomic, Result} = mnesia:transaction(fun() ->
        case db_get_data(D#data.primary_key) of
            [] -> db_add_data(D);
            _  -> throw({error, bzzt_duplicate_primary_key})
        end
    end).
It differs from the above in that the above throws
{error, bzzt_duplicate_primary_key},
whereas this one throws
{error, {badmatch, {aborted, {throw,{error, bzzt_duplicate_primary_key}}}}}
So: is there some convention for indicating this sort of error? Or is there a built-in way that I can get mnesia to throw this error for me?
I think both of them are fine; you only need to make the code a bit prettier, like:
add_data(D) when is_record(D, data) ->
    Fun = fun() ->
              case db_get_data(D#data.primary_key) of
                  [] -> db_add_data(D);
                  _  -> throw({error, bzzt_duplicate_primary_key})
              end
          end,
    mnesia:activity(transaction, Fun).
or
add_data(D) when is_record(D, data) ->
    Fun = fun() ->
              case db_get_data(D#data.primary_key) of
                  [] -> db_add_data(D);
                  _  -> {error, bzzt_duplicate_primary_key}
              end
          end,
    Result = mnesia:activity(transaction, Fun),
    case Result of
        {error, Error} -> throw(Error);
        _ -> Result
    end.
Do you throw errors or return errors? I would return an error myself. We split our code out into mnesia work units - a module with a set of functions that perform basic mnesia activities, not in transactions - and an API module which 'composes' the work units into mnesia transactions, with functions that look very similar to the one above.
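As an illustration of that split, a sketch with hypothetical module and function names (not the poster's actual code; assumes the #data record definition is shared via a header):

%% Work-unit module: plain mnesia operations, no transaction handling.
-module(data_units).
-export([add_if_absent/1]).

add_if_absent(D) ->
    case mnesia:read(data, D#data.primary_key) of
        []  -> mnesia:write(D);
        [_] -> {error, bzzt_duplicate_primary_key}
    end.

%% API module: composes work units into mnesia transactions.
-module(data_api).
-export([add_data/1]).

add_data(D) ->
    mnesia:activity(transaction, fun() -> data_units:add_if_absent(D) end).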
