I'm working my way through a project using Marionette.js, but I've hit a roadblock.
My models each represent one leg of a trip driven by a delivery person. Each model has
pick_location, pickup_time, drop_location, drop_time
I need to calculate not only the distance traveled during each delivery but also the distance between deliveries. So if
d1 - pick_loc-1, pick_time-1, drop_loc-1, drop_time-1, ....
d2 - pick_loc-2, pick_time-2, drop_loc-2, drop_time-2,...etc
I want to produce a display of
d1 - travel_time (pick to drop), travel_distance (pick to drop)
travel_time between deliveries, travel_distance between deliveries
d2 - travel_time (pick to drop), travel_distance (pick to drop)
then sum the columns when done.
The sum at the end, I'm thinking, could easily be accomplished with a collection.reduce definition.
The calculation of time and distance between locations within a single model should be straightforward.
The hang-up I'm having is calculating the intermediate values that involve calculations between models, and displaying those calculated rows in the view.
It looks to me like you're essentially presenting the same information in each row:
| Context | Travel Time | Travel Distance |
Where Context is either a delivery or a transit. You could use a bit of trickery when creating your collection by injecting new objects that represent the transit legs:
var Route = Backbone.Model.extend({
defaults: {
context: '',
beginTime: '',
endTime: '',
beginLocation: '',
endLocation: ''
}
});
var Trip = Backbone.Collection.extend({
model: Route,
parse: function(data) {
var out = [],
last;
_.each(data, function(route) {
if (last) {
out.push({
context: 'transit',
beginTime: last.endTime,
endTime: route.beginTime,
beginLocation: last.endLocation,
endLocation: route.beginLocation
});
}
out.push(_.extend({ context: 'delivery' }, route));
last = route;
});
return out;
}
});
I have a sample fiddle here.
http://jsfiddle.net/ccamarat/jMt8B/
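To tie this back to the totals the question asks about, here is a rough usage sketch (not from the fiddle above, and untested). It assumes the raw delivery data (rawDeliveries below, a hypothetical array) has already been mapped to the Route attribute names, that times are ISO-8601 strings, and that locations are plain {lat, lng} objects; the two helpers are stand-ins for whatever time/distance calculations you actually use.
var trip = new Trip(rawDeliveries, { parse: true }); // parse() injects the synthetic 'transit' rows

// Hypothetical helpers, shown only for illustration.
function minutesBetween(a, b) {
  return (new Date(b) - new Date(a)) / 60000;
}
function kmBetween(a, b) {
  // great-circle distance, assuming {lat, lng} in degrees
  var toRad = function(d) { return d * Math.PI / 180; };
  var dLat = toRad(b.lat - a.lat), dLng = toRad(b.lng - a.lng);
  var h = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
          Math.cos(toRad(a.lat)) * Math.cos(toRad(b.lat)) *
          Math.sin(dLng / 2) * Math.sin(dLng / 2);
  return 2 * 6371 * Math.asin(Math.sqrt(h));
}

// Every row (delivery or transit) can now be rendered the same way, and the column
// sums fall out of collection.reduce, as suggested in the question.
var totals = trip.reduce(function(memo, route) {
  memo.time += minutesBetween(route.get('beginTime'), route.get('endTime'));
  memo.distance += kmBetween(route.get('beginLocation'), route.get('endLocation'));
  return memo;
}, { time: 0, distance: 0 });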
Dear Earth Engine community,
Can someone help me solve the following problem?
I want to compute the aggregate nightlight intensities (sum) within all first level administrative regions of the world. For that purpose I use a shapefile which contains the regional boundaries (GADM) and raster data on nightlight (VIIRS).
The issues with the following code are that 1) I get an error saying "Unknown element type provided: object. Expected: ee.Image, ee.ImageCollection, ee.FeatureCollection or ee.Element." for the nighttime.reduceRegion operation, and 2) only the last feature of the selection is returned by print(final).
Unfortunately, I have not managed to solve these problems. It would be great if someone could help me improve the code. I am sure there are many issues, since JavaScript and the Earth Engine API are completely new to me.
Thanks a lot in advance!
// Import nighttime raster data.
var nighttimeCollection = ee.ImageCollection('NOAA/VIIRS/DNB/MONTHLY_V1/VCMSLCFG');
// Import shapefile containing region boundaries.
var region_boundaries = ee.FeatureCollection("users/hendrikscheewel/gadm_level_1");
// Select a specific year ::: Later this should be done within a loop.
var year = "2014";
// Aggregate monthly nighttime data to year x.
var nighttime = nighttimeCollection
.filter(ee.Filter.date(year+'-01-01', year+'-12-31'))
.select('avg_rad')
.reduce(ee.Reducer.mean());
// This function does the following:
// * Aggregate nightlight data within a shape/feature by taking its sum,
// * Assign the result to the feature,
// * Create copy of feature with simplified geometry (centroid) and fewer columns,
// * Return the copy.
var compute_nightlight = function(feature) {
// Sum the average radiance within the feature's shape.
var result = nighttime.reduceRegion({
geometry: feature.geometry(),
reducer: ee.Reducer.sum(),
scale: 30,
maxPixels: 1e9,
});
// Set "nightlight" as a new property.
feature = ee.Feature(feature.set('nightlight',result.get('avg_rad_mean')));
// Get the centroid of the feature's geometry.
var featureSimplified = feature.centroid();
// Keep this list of properties.
var keepProperties = ['GID_0','GID_1','NAME_0','NAME_1','nightlight'];
featureSimplified = featureSimplified.copyProperties(feature, keepProperties);
// Return a new Feature, copying properties from the old Feature.
return featureSimplified;
};
//print(compute_nightlight(region_boundaries.first()));
var final = region_boundaries.filter(ee.Filter.eq('NAME_0','Belgium')).iterate(compute_nightlight);
print(final)
Export.table.toDrive({
collection: final,
description: 'nl_'+year,
fileFormat: 'CSV'
});
OK, I found my main mistake: instead of using the .iterate() method I should have used the .map() method. .map() applies the function to every feature and returns a new collection, whereas .iterate() folds the collection down to a single accumulated value, which is why only the last feature was returned.
After some cleaning the code looks like this:
// Select a specific year
var year = "2014";
// Aggregate monthly nighttime data to year x.
var nighttime = nighttimeCollection
.filter(ee.Filter.date(year+'-01-01', year+'-12-31'))
.select('avg_rad')
.reduce(ee.Reducer.mean());
// This function does the following:
// * Aggregate nightlight data within a shape/feature by taking its sum,
// * Assign the result to the feature,
// * Create copy of feature with simplified geometry (centroid) and fewer columns,
// * Return the copy.
var compute_nightlight = function(feature) {
// Sum the average radiance within the feature's shape.
var result = nighttime.reduceRegion({
geometry: feature.geometry(),
reducer: ee.Reducer.sum(),
scale: 30,
maxPixels: 1e9,
});
// Set "nightlight" as a new property.
feature = ee.Feature(feature.set('nightlight',result.get('avg_rad_mean')));
// Return a new Feature, copying properties from the old Feature.
return feature.centroid();
};
var final = ee.FeatureCollection(region_boundaries.map(compute_nightlight));
Export.table.toDrive({
collection: final,
description: 'nl_'+year,
fileFormat: 'CSV'
});
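The comment in the original script mentions eventually doing this for several years. One possible way (an untested sketch, reusing nighttimeCollection and region_boundaries from above) is a plain client-side loop that queues one export task per year; the year list here is just an example:
var years = ['2013', '2014', '2015']; // illustrative list of years
years.forEach(function(y) {
  // Aggregate the monthly nighttime data for year y.
  var nighttimeY = nighttimeCollection
    .filter(ee.Filter.date(y + '-01-01', y + '-12-31'))
    .select('avg_rad')
    .reduce(ee.Reducer.mean());
  // Same per-feature computation as above, bound to this year's image.
  var computeNightlightY = function(feature) {
    var result = nighttimeY.reduceRegion({
      geometry: feature.geometry(),
      reducer: ee.Reducer.sum(),
      scale: 30,
      maxPixels: 1e9
    });
    return ee.Feature(feature.set('nightlight', result.get('avg_rad_mean'))).centroid();
  };
  // Queue one export task per year.
  Export.table.toDrive({
    collection: ee.FeatureCollection(region_boundaries.map(computeNightlightY)),
    description: 'nl_' + y,
    fileFormat: 'CSV'
  });
});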
I am still learning and need some guidance on how to form a complex nested array of objects from a JSON flat file.
Here is the current input:
[
{"id":"US-AL","state":"Alabama","industry":"All","category":"Cable Related Services","itemid":"12290","item":"Basic Cable Services","answer":"Exempt","explanation":"The sale of these services is not subject to sales tax.","citation":"Ala. Code sec. 40-23-1; Ala. Code sec. 40-23-2"},
{"id":"US-AL","state":"Alabama","industry":"All","category":"Cable Related Services","itemid":"12291","item":"Pay Per View","answer":"Exempt","explanation":"The sale of these services is not subject to sales tax.","citation":"Ala. Code sec. 40-23-1; Ala. Code sec. 40-23-2"},
{"id":"US-AL","state":"Alabama","industry":"Retail","category":"Sales Tax Holidays","itemid":"12524","item":"All Disaster Preparedness Supply","answer":"Exempt","explanation":"Alabama provides for an annual state sales tax holiday for severe weather preparedness items. Counties and municipalities are allowed to provide an exemption from local sales and use taxes from the same items during the same weekend as the state holiday.","citation":"Ala. Admin. Code r. 810-6-3-.66."},
{"id":"US-AL","state":"Alabama","industry":"Retail","category":"Sales Tax Holidays","itemid":"12525","item":"All Energy star qualified products","answer":"N/A","explanation":"Alabama does not provide a sales tax holiday for energy efficient products.","citation":"N/A"}
]
Here is the format I want it in:
[
  {
    "id": "US-AL",
    "state": "Alabama",
    "industries": [
      {
        "industry": "All",
        "categories": [
          {
            "category": "Cable Related Services",
            "items": [
              {"itemid":"12290","item":"Basic Cable Services","answer":"Exempt","explanation":"The sale of these services is not subject to sales tax.","citation":"Ala. Code sec. 40-23-1; Ala. Code sec. 40-23-2"},
              {"itemid":"12291","item":"Pay Per View","answer":"Exempt","explanation":"The sale of these services is not subject to sales tax.","citation":"Ala. Code sec. 40-23-1; Ala. Code sec. 40-23-2"}
            ]
          },
          {
            "category": "Sales Tax Holidays",
            "items": [
              {"itemid":"12524","item":"All Disaster Preparedness Supply","answer":"Exempt","explanation":"Alabama provides for an annual state sales tax holiday for severe weather preparedness items. Counties and municipalities are allowed to provide an exemption from local sales and use taxes from the same items during the same weekend as the state holiday.","citation":"Ala. Admin. Code r. 810-6-3-.66."}
            ]
          }
        ]
      },
      {"industry": "Sales", ...}
    ]
  },
  {"id": "US-AR", "state": "Arizona", ...}
]
I've tried using .map, .reduce, .filter ...
Using this example, I was able to get one level formatted, but I'm not sure if this is the right method or if there is an easier way to accomplish this.
var grouped = utm.reduce((r, o) => {
r[o.industry] = r[o.industry] || [];
r[o.industry].push(o);
return r;
}, {});
var rs = Object.keys(grouped).map(industry => ({ industry, categories: grouped[industry] }));
I found a great answer to this from this SO post: Convert flat array of objects into nested array of objects
I tried this and it does exactly what I was trying to do.
// Groups a flat array into a tree.
// "data" is the flat array.
// "keys" is an array of properties to group on.
function groupBy(data, keys) {
if (keys.length == 0) return data;
// The current key to perform the grouping on:
var key = keys[0];
// Loop through the data and construct buckets for
// all of the unique keys:
var groups = {};
for (var i = 0; i < data.length; i++)
{
var row = data[i];
var groupValue = row[key];
if (groups[groupValue] == undefined)
{
groups[groupValue] = new Array();
}
groups[groupValue].push(row);
}
// Remove the first element from the keys array:
keys.shift();
// If there are no more keys left, we're done:
if (keys.length == 0) return groups;
// Otherwise, handle further groupings:
for (var group in groups)
{
groups[group] = groupBy(groups[group], keys.slice());
}
return groups;
}
Then I specify what fields I want to be grouped here:
var UtmDataByState = groupBy(utm, ["id","industry","category"]);
Worked great! Thanks Steve :)
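If you also need the exact array-of-objects shape from the question (industries / categories / items) rather than objects keyed by value, here is a follow-up sketch that reshapes groupBy's output. It is untested and not part of the answer above; it recovers state from the first row of each group:
var nested = Object.keys(UtmDataByState).map(function(id) {
  var industriesObj = UtmDataByState[id];
  // Pull one original row back out so we can recover the state name.
  var firstIndustry = Object.keys(industriesObj)[0];
  var firstCategory = Object.keys(industriesObj[firstIndustry])[0];
  var firstRow = industriesObj[firstIndustry][firstCategory][0];
  return {
    id: id,
    state: firstRow.state,
    industries: Object.keys(industriesObj).map(function(industry) {
      var categoriesObj = industriesObj[industry];
      return {
        industry: industry,
        categories: Object.keys(categoriesObj).map(function(category) {
          return {
            category: category,
            // keep only the item-level fields on each row
            items: categoriesObj[category].map(function(row) {
              return {
                itemid: row.itemid,
                item: row.item,
                answer: row.answer,
                explanation: row.explanation,
                citation: row.citation
              };
            })
          };
        })
      };
    })
  };
});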
I created an array of struct values. Each struct contains an array of strings, and I want to check whether those strings appear in another array of strings.
How can I do that or what tools should I look into?
I found that I can use a type called Set, but it doesn't seem to work with arrays inside a struct.
import UIKit
// Define structure
struct Drink {
var name: String
var content: Array<String>
var amount: Array<Int>
var desc: String
}
// Define drinks
var mojito = Drink(name: "Mojito", content: ["Rum","Club soda"], amount: [4,20], desc: "Summer drink")
var vodkaJuice = Drink(name: "Vodka juice", content: ["Vodka","Juice"], amount: [4,20], desc: "Cheap alcohol")
var list = [mojito,vodkaJuice]
// Define what ingredients you have
var stock = ["Gin", "Vodka", "Juice", "Club soda"]
How do I make a list of drinks I can make from what I have?
Use a Set instead of an array so you can simply do a subset check:
import UIKit
// Define structure
struct Drink {
var name: String
var content: Set<String> // we use a Set instead
var amount: Array<Int>
var desc: String
}
// Define drinks
var mojito = Drink(name: "Mojito", content: ["Rum","Club soda"], amount: [4,20], desc: "Summer drink")
var vodkaJuice = Drink(name: "Vodka juice", content: ["Vodka","Juice"], amount: [4,20], desc: "Cheap alcohol")
var list = [mojito,vodkaJuice]
// Define what ingredients you have
var stock = ["Gin", "Vodka", "Juice", "Club soda"]
// find all instances of drinks where their contents
// are subsets of what you have in stock
let drinks = list.filter { $0.content.isSubset(of: stock) }
The reason to prefer sets over "for-loops inside for-loops" is performance. Set uses an internal hash table, so membership lookups take roughly constant time on average, and filtering the whole list scales with the total number of ingredients rather than with the product of the two list sizes.
If you had done it with nested for loops, the complexity would be O(N·M) for N ingredients across your drinks and M items in stock, which could take longer and consume more battery depending on how many items you have.
That doesn't mean you should always use sets though. Sets have tradeoffs. They bring in performance but their initial construction is slower and they don't support duplicate items. Use them only in specific cases like this. Never use sets because "they are faster", use them when they solve your specific problem.
I strongly recommend skimming the other data structures provided by Swift's standard library so you'll know which one to use and when.
I'm using Sequelize with a raw query to fetch data from a table, but I'm getting full model instances when I only need the dataValues.
My setup looks like this:
const sequelize = new Sequelize({
database: process.env.PGDATABASE,
username: process.env.PGUSER,
password: process.env.PGPASS,
host: process.env.PGHOST,
port: process.env.PGPORT,
dialect: "postgres"
});
getPostGres: () => {
return sequelize;
}
and the way I'm querying the database looks like this:
let messageRatingsArr = await getPostGres().query(
`SELECT mr.support_email, mr.support_name,
(select count(mrn."chatId") as total FROM message_ratings as mrn WHERE mrn."ratingType"='NEGATIVE' and mr.support_email = mrn.support_email) as negative,
(select count(mrp."chatId") as total FROM message_ratings as mrp WHERE mrp."ratingType"='POSITIVE' and mr.support_email = mrp.support_email) as positive,
(select count(mrm."chatId") as total FROM message_ratings as mrm WHERE mrm."ratingType"='MIXED' and mr.support_email = mrm.support_email) as mixed,
(select count(mru."chatId") as total FROM message_ratings as mru WHERE mru."ratingType"='NEUTRAL' and mr.support_email = mru.support_email) as neutral
FROM message_ratings mr
WHERE mr."createdAt" >= '${properFromDate}' AND mr."createdAt" <= '${properToDate}'
group by mr.support_email, mr.support_name
limit ${args.count} offset ${args.offset} `,
{
model: MessageRatingPG,
mapToModel: true
}
);
let messageRatings = messageRatingsArr.map(item=>{
return item.dataValues;
})
let result = connectionFromArray(messageRatings, args);
result.totalCount = messageRatings.length;
return result;
As you can see, I'm mapping over the query result, whose elements carry all kinds of extra properties like dataValues, _options, isNewRecord, etc. Looping through the array isn't efficient if I have a large data set, so what can I do to get only the dataValues?
From https://sequelize.org/master/manual/raw-queries.html:
In cases where you don't need to access the metadata you can pass in a query type to tell sequelize how to format the results. For example, for a simple select query you could do:
sequelize.query("SELECT * FROM `users`", { type: sequelize.QueryTypes.SELECT})
.then(users => {
// We don't need spread here, since only the results will be returned for select queries
})
Now, looking at your code, and comparing with the next paragraph in the docs:
A second option is the model. If you pass a model the returned data will be instances of that model.
// Callee is the model definition. This allows you to easily map a query to a predefined model
sequelize
.query('SELECT * FROM projects', {
model: Projects,
mapToModel: true // pass true here if you have any mapped fields
})
.then(projects => {
// Each record will now be an instance of Project
})
I'd suggest removing the following from your original code:
{
model: MessageRatingPG,
mapToModel: true
}
and replacing it with { type: sequelize.QueryTypes.SELECT }
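Applied to your query, the call would look roughly like this (an untested sketch; the SQL is abbreviated, it is the same statement as above):
let messageRatingsArr = await getPostGres().query(
  `SELECT mr.support_email, mr.support_name /* ...same subqueries, WHERE, GROUP BY, LIMIT as above... */`,
  { type: sequelize.QueryTypes.SELECT } // rows come back as plain objects
);
// No need to map over .dataValues any more; messageRatingsArr already holds plain row data.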
You have to add the raw attribute to your query. From the docs:
Sometimes you might be expecting a massive dataset that you just want to display, without manipulation. For each row you select, Sequelize creates an instance with functions for update, delete, get associations etc. If you have thousands of rows, this might take some time. If you only need the raw data and don't want to update anything, you can do like this to get the raw data.
Project.findAll({ where: { ... }, raw: true })
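For completeness, the same idea applied to the model from the question; the where filter below is purely illustrative (it assumes MessageRatingPG is your model):
const rows = await MessageRatingPG.findAll({
  where: { ratingType: 'NEGATIVE' }, // hypothetical filter, for illustration only
  raw: true // plain objects instead of full model instances
});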
I have computed and displayed as a map layer the cross-covariance of Landsat-derived NDVI and CHIRPS precipitation data.
I now want to export this as an image, clipped to my region of interest, but am getting the following error:
Error 'Cannot export array bands'
I have not managed to find a solution. Is there a way to export this map layer as a GeoTIFF? I think perhaps I need to flatten the array, but I am unsure how to do this.
Here is the code:
var l8toa = ee.ImageCollection("LANDSAT/LC08/C01/T1_TOA");
//Define a region of interest - Baringo county, kenya
var Baringo2 = /* color: #98ff00 */ee.Geometry.Polygon(
[[[35.69382363692023, 1.4034169899773616],
[35.69382363692023, 1.2606333558875118],
[35.61691934004523, 1.0079975313237526],
[35.58945351973273, 0.6509798625215468],
[35.71030312910773, 0.35436075019447294],
[35.72128945723273, 0.18956774160826206],
[35.61691934004523, 0.18407460674896256],
[35.58945351973273, 0.13463632293582842],
[35.71030312910773, 0.04125265421470341],
[35.68283730879523, -0.0466379620709295],
[35.74875527754523, -0.18945988757796725],
[35.96848184004523, 0.05223897866641199],
[36.09482461348273, 0.002800509340276178],
[36.27060586348273, 0.2719645271288622],
[36.23215371504523, 0.45872822561768967],
[36.32004434004523, 0.6509798625215468],
[36.47934609785773, 0.8651943843139164],
[36.32004434004523, 0.9915205478901427],
[36.18271523848273, 1.1672705367627716],
[36.08933144942023, 1.1892385469740003],
[35.79270059004523, 1.6944479915417494]]]);
//print (Baringo2);
//Add Baringo
Map.addLayer(ee.Image().paint(Baringo2, 0, 2), {}, 'Baringo_county');
Map.centerObject(Baringo2);
//B) Filtering, masking and preparing bands of interest
//preprocess the Landsat 8 imagery by filtering it to the location of interest, masking clouds,
//and adding the variables in the model:
// This field contains UNIX time in milliseconds.
var timeField = 'system:time_start';
// Use this function to mask clouds in all Landsat imagery.
var maskClouds = function(image) {
var quality = image.select('BQA');
var cloud01 = quality.eq(61440);
var cloud02 = quality.eq(53248);
var cloud03 = quality.eq(28672);
var mask = cloud01.or(cloud02).or(cloud03).not();
return image.updateMask(mask);
};
// Use this function to add variables for NDVI, time and a constant
// to Landsat 8 imagery.
var addVariablesl8 = function(image) {
// Compute time in fractional years since the epoch.
var date = ee.Date(image.get(timeField));
var years = date.difference(ee.Date('1970-01-01'), 'year');
// Return the image with the added bands.
return image
// Add an NDVI band.
.addBands(image.normalizedDifference(['B5', 'B4']).rename('NDVI'))
.float()
// Add a time band.
.addBands(ee.Image(years).rename('t').float())
// Add a constant band.
.addBands(ee.Image.constant(1));
};
// Remove clouds, add variables and filter to the area of interest - landsat 8.
var filteredLandsatl8 = l8toa
.filterDate('2013-02-07', '2018-08-25')
.filterBounds(Baringo2)
.map(maskClouds)
.map(addVariablesl8);
// Cross-covariance is measuring the correspondence between a variable and a covariate at a lag.
//Create a lagged ImageCollection
var lag = function(leftCollection, rightCollection, lagDays) {
var filter = ee.Filter.and(
ee.Filter.maxDifference({
difference: 1000 * 60 * 60 * 24 * lagDays,
leftField: timeField,
rightField: timeField
}),
ee.Filter.greaterThan({
leftField: timeField,
rightField: timeField
}));
return ee.Join.saveAll({
matchesKey: 'images',
measureKey: 'delta_t',
ordering: timeField,
ascending: false, // Sort reverse chronologically
}).apply({
primary: leftCollection,
secondary: rightCollection,
condition: filter
});
};
//This function joins a collection to itself, using a filter that gets all the images before but within a specified time difference (in days) of each image.
//That list of previous images within the lag time is stored in a property of the image called images, sorted reverse chronologically.
//Compute cross covariance
//i) The covariance reducer expects a set of one-dimensional arrays as input.
//So pixel values corresponding to time t need to be stacked with pixel values at time t - lag as multiple bands in the same image.
var merge = function(image) {
// Function to be passed to iterate.
var merger = function(current, previous) {
return ee.Image(previous).addBands(current);
};
return ee.ImageCollection.fromImages(
image.get('images')).iterate(merger, image);
};
//...use that function to merge the bands from the lagged collection:
//Use a function to convert the merged bands to arrays with bands pt and ph, then reduce with the covariance reducer:
var covariance = function(mergedCollection, band, lagBand) {
return mergedCollection.select([band, lagBand]).map(function(image) {
return image.toArray();
}).reduce(ee.Reducer.covariance(), 8);
};
//is NDVI related in some way to the precipitation before the NDVI was observed?
//To estimate the strength of this relationship (in every pixel),
//load precipitation, join, merge, and reduce as previously:
// Load Precipitation data (covariate)
var chirps = ee.ImageCollection('UCSB-CHG/CHIRPS/PENTAD');
// Join the t-l (l=1 pentad) precipitation images to the Landsat.
var lag1PrecipNDVI = lag(filteredLandsatl8, chirps, 5);
// rainfall 5 days previous - aimed at annual grasses that respond quickly
// Add the precipitation images as bands.
var merged1PrecipNDVI = ee.ImageCollection(lag1PrecipNDVI.map(merge));
// Compute, visualise and display cross-covariance.
var cov1PrecipNDVI = covariance(merged1PrecipNDVI, 'NDVI', 'precipitation');
// create vizualization parameters
var viz = {min:-0.5, max:0.5, palette:['0000FF', '008000', 'FF0000']};
Map.addLayer(cov1PrecipNDVI.arrayGet([0, 1]).clip(Baringo2), viz, 'NDVI - PRECIP cov (lag = 5), Baringo');
//red is high cross covariance and blue is low covariance between NDVI and precipitation 5 days previously
// Export the cov1PrecipNDVI image, specifying scale and region.
Export.image.toDrive({
folder: 'Baringo_Remote_Sensing',
image: cov1PrecipNDVI,
description: 'NDVI - PRECIP cov (lag = 5)',
scale: 30,
region: Baringo2,
maxPixels: 1e10
});
Can anyone help me please?
Thank you.
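One idea, offered only as an untested sketch: since the covariance output is an array image, you could export the scalar band you already extract for display with arrayGet, instead of the full array image. The band name and task description below are made up for illustration:
var covScalar = cov1PrecipNDVI.arrayGet([0, 1]).rename('NDVI_precip_cov');
Export.image.toDrive({
  image: covScalar.clip(Baringo2),
  description: 'NDVI_PRECIP_cov_lag5', // description simplified here, chosen just for this example
  folder: 'Baringo_Remote_Sensing',
  scale: 30,
  region: Baringo2,
  maxPixels: 1e10
});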