Remove an object from a two-level nested JSONB column

Here is how the data in my column looks:
[
  {
    "exercises": [
      {
        "exercise_name": "test",
        "exercise_video": {
          "media_id": "id_1",
          "video_url": "URL",
          "media_name": "HIIT.mp4",
          "video_play_at": 0,
          "is_video_muted": true,
          "video_duration": 0,
          "video_play_duration": 120,
          "is_video_play_in_loop": true
        },
        "show_countdown": true,
        "show_to_participants": false,
        "exercise_duration_in_seconds": 120
      },
      {
        "exercise_name": "test1",
        "exercise_video": {
          "media_id": "id_2",
          "video_url": "URL",
          "media_name": "HIIT.mp4",
          "video_play_at": 0,
          "is_video_muted": true,
          "video_duration": 0,
          "video_play_duration": 120,
          "is_video_play_in_loop": true
        },
        "show_countdown": true,
        "show_to_participants": false,
        "exercise_duration_in_seconds": 120
      }
    ],
    "segment_icon": "Barre",
    "segment_name": "test",
    "segment_duration_in_seconds": 120
  }
]
I need to remove the entire exercise_video object wherever media_id = 'id_2'. Expected final result:
[
  {
    "exercises": [
      {
        "exercise_name": "test",
        "exercise_video": {
          "media_id": "id_1",
          "video_url": "URL",
          "media_name": "HIIT.mp4",
          "video_play_at": 0,
          "is_video_muted": true,
          "video_duration": 0,
          "video_play_duration": 120,
          "is_video_play_in_loop": true
        },
        "show_countdown": true,
        "show_to_participants": false,
        "exercise_duration_in_seconds": 120
      },
      {
        "exercise_name": "test1",
        "show_countdown": true,
        "show_to_participants": false,
        "exercise_duration_in_seconds": 120
      }
    ],
    "segment_icon": "Barre",
    "segment_name": "test",
    "segment_duration_in_seconds": 120
  }
]
WITH updated_activity AS (
    SELECT activity_id,
           ('{exercises,'||index-1||',exercise_video.media_id}')::text[] AS path
    FROM activity,
         jsonb_array_elements(
             (workout_builder_test->>'exercises')::jsonb->'exercise_video'
         ) WITH ORDINALITY arr(x, index)
    WHERE x->>'media_id' = 'id_2'
)
UPDATE activity
SET workout_builder_test = workout_builder_test #- updated_activity.path
FROM updated_activity
WHERE activity.activity_id = updated_activity.activity_id
  AND activity.gym_id = 'gym_id'

Your code is pretty close; the problems are in your paths. Here is an example with the paths mended:
SELECT activity_id,
       jsonb_pretty(workout_builder_test #- ('{0,exercises,' || e.i - 1 /* index of part to remove */ || ',exercise_video}')::text[])
FROM activity,
     jsonb_array_elements(workout_builder_test->0->'exercises') WITH ORDINALITY AS e(cont, i)
WHERE e.cont->'exercise_video'->>'media_id' = 'id_2';
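If you need this as an actual UPDATE (as in the question) rather than a SELECT, here is a minimal sketch along the same lines, assuming the activity table with activity_id, gym_id and workout_builder_test from the question, and hard-coding the outer segment index to 0 as in the sample data:
WITH to_remove AS (
    -- Find the 0-based position of the matching exercise in each row.
    SELECT activity_id,
           ('{0,exercises,' || e.i - 1 || ',exercise_video}')::text[] AS path
    FROM activity,
         jsonb_array_elements(workout_builder_test->0->'exercises') WITH ORDINALITY AS e(cont, i)
    WHERE e.cont->'exercise_video'->>'media_id' = 'id_2'
)
UPDATE activity
SET workout_builder_test = workout_builder_test #- to_remove.path
FROM to_remove
WHERE activity.activity_id = to_remove.activity_id
  AND activity.gym_id = 'gym_id';
Note that #- removes one path per statement, so if several exercises in the same row can match, rerun the update (or rebuild the exercises array with jsonb_agg instead).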

Related

How do I crop faces based on bounding boxes in Tensorflow.js?

I need to crop the faces detected by the BlazeFace model and then send the image over to a custom model I made. I have already implemented face detection with the bounding boxes, but I am stuck at cropping the face out.
I have the coordinates of the landmarks, the bottomRight and the topLeft, but I do not know how to proceed. In Python with TensorFlow there are functions for this, but with TensorFlow.js I can't find anything.
Rendering Bounding Boxes on Face
const faces = await bfModel
  .estimateFaces(tensor, returnTensors)
  .catch(e => console.log(e));
console.log(faces);
// Faces is an array of objects
if (!isEmpty(faces)) {
  setModelFaces({ faces });
}

const renderBoundingBoxes = () => {
  const { faces } = modelFaces;
  const scale = {
    height: styles.camera.height / tensorDims.height,
    width: styles.camera.width / tensorDims.width
  };
  const flipHorizontal = Platform.OS === "ios" ? false : true;
  if (!isEmpty(faces)) {
    return faces.map((face, i) => {
      const { topLeft, bottomRight } = face;
      const bbLeft = topLeft.dataSync()[0] * scale.width;
      const boxStyle = Object.assign({}, styles.bbox, {
        left: flipHorizontal
          ? previewWidth - bbLeft - previewLeft
          : bbLeft + previewLeft,
        top: topLeft.dataSync()[1] * scale.height + 20,
        width:
          (bottomRight.dataSync()[0] - topLeft.dataSync()[0]) * scale.width,
        height:
          (bottomRight.dataSync()[1] - topLeft.dataSync()[1]) * scale.height
      });
      return <View key={i} style={boxStyle}></View>;
    });
  }
};
Output from console.log(faces):
Array [
Object {
"bottomRight": Tensor {
"dataId": Object {},
"dtype": "float32",
"id": 220600,
"isDisposedInternal": false,
"kept": false,
"rankType": "1",
"scopeId": 426282,
"shape": Array [
2,
],
"size": 2,
"strides": Array [],
},
"landmarks": Tensor {
"dataId": Object {},
"dtype": "float32",
"id": 220602,
"isDisposedInternal": false,
"kept": false,
"rankType": "2",
"scopeId": 426286,
"shape": Array [
6,
2,
],
"size": 12,
"strides": Array [
2,
],
},
"probability": Tensor {
"dataId": Object {},
"dtype": "float32",
"id": 220592,
"isDisposedInternal": false,
"kept": false,
"rankType": "1",
"scopeId": 426249,
"shape": Array [
1,
],
"size": 1,
"strides": Array [],
},
"topLeft": Tensor {
"dataId": Object {},
"dtype": "float32",
"id": 220599,
"isDisposedInternal": false,
"kept": false,
"rankType": "1",
"scopeId": 426280,
"shape": Array [
2,
],
"size": 2,
"strides": Array [],
},
},
]
The image can be cropped using tf.image.cropAndResize. The image tensor has to be a 4D tensor; if it is a 3D tensor, it first needs to be expanded. The expected crop height and width are passed as arguments to cropAndResize, which also expects the boxes as normalized [y1, x1, y2, x2] coordinates.
const boxes = tf.concat([topLeftTensor, bottomRightTensor]).reshape([-1, 4]);
const crop = tf.image.cropAndResize(images, boxes, [0], [height, width]);
I am not sure, but this may work:
const img_4d = tf.expandDims(inputTF3D, 0); // axis 0: add a batch dimension
const start = predictions[id].topLeft;
const end = predictions[id].bottomRight;
let boxes = tf.concat([start, end]);
boxes = tf.reshape(boxes, [-1, 4]);
const crop = tf.image.cropAndResize(img_4d, boxes, [0], [IMAGE_HEIGHT, IMAGE_WIDTH]);
https://github.com/tensorflow/tfjs/issues/3914
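Putting the two snippets above together, here is a minimal sketch of a crop helper. It assumes tf is the TensorFlow.js namespace, frame is a 3D image tensor of shape [height, width, channels], and face is one BlazeFace prediction obtained with returnTensors: true (so topLeft/bottomRight are [x, y] pixel-coordinate tensors); the box is normalized and reordered into the [y1, x1, y2, x2] layout that cropAndResize expects:
const cropFace = (frame, face, cropHeight, cropWidth) => {
  const [h, w] = frame.shape;                  // frame is [height, width, channels]
  const [x1, y1] = face.topLeft.dataSync();    // BlazeFace returns [x, y] in pixels
  const [x2, y2] = face.bottomRight.dataSync();
  // cropAndResize wants normalized [y1, x1, y2, x2] boxes.
  const boxes = tf.tensor2d([[y1 / h, x1 / w, y2 / h, x2 / w]]);
  const batched = frame.expandDims(0);         // 3D -> 4D: [1, height, width, channels]
  return tf.image.cropAndResize(batched, boxes, [0], [cropHeight, cropWidth]);
};

// Usage, assuming the faces array from estimateFaces above:
// const crop = cropFace(tensor, faces[0], 224, 224);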

How to conditionally set a new field on all sub-documents in MongoDB collection

I'm using MongoDB version 3.4.9. I have the following MongoDB schema:
const reputationHistorySchema = new mongoose.Schema({
  isBid: { type: Boolean, default: false, required: true },
  isResult: { type: Boolean, default: false, required: true },
});

const schema = new mongoose.Schema({
  reputationHistory: [ reputationHistorySchema ],
});
This is an example of what it looks like:
[{
  reputationHistory: [{
    isBid: true,
    isResult: false,
  }, {
    isBid: false,
    isResult: true,
  }]
}, {
  reputationHistory: [{
    isBid: true,
    isResult: false,
  }, {
    isBid: false,
    isResult: true,
  }]
}]
I would like to run an update command to produce the following. If isBid == true, then I want reason to be "Prediction". Else, reason should be "Result".
[{
  reputationHistory: [{
    isBid: true,
    isResult: false,
    reason: "Prediction",
  }, {
    isBid: false,
    isResult: true,
    reason: "Result",
  }]
}, {
  reputationHistory: [{
    isBid: true,
    isResult: false,
    reason: "Prediction",
  }, {
    isBid: false,
    isResult: true,
    reason: "Result",
  }]
}]
Below is my attempt:
db.users.update(
  {},
  {
    $set: {
      "reputationHistory.$[].reason": {
        $cond: { if: { "reputationHistory.$[].isBid": true }, then: "Prediction", else: "Result" }
      }
    }
  },
  { multi: true }
)
Below is the error I receive:
WriteResult({
"nMatched" : 0,
"nUpserted" : 0,
"nModified" : 0,
"writeError" : {
"code" : 16837,
"errmsg" : "cannot use the part (reputationHistory of reputationHistory.$[].reason) to traverse the element ({reputationHistory: [ { isBid: true, isResult: false, _id: ObjectId('5e55042c097bca0004647e18') } ]})"
}
})
How can I edit my MongoDB query to achieve my goal?
Your update fails because $cond is an aggregation operator and cannot be evaluated inside a plain $set document. MongoDB 4.2 allows you to run updates with an aggregation pipeline; try:
db.users.updateMany({}, [
  {
    $addFields: {
      reputationHistory: {
        $map: {
          input: "$reputationHistory",
          in: {
            $mergeObjects: [
              "$$this",
              { reason: { $cond: { if: { $eq: [ "$$this.isBid", true ] }, then: "Prediction", else: "Result" } } }
            ]
          }
        }
      }
    }
  }
])
If you can't upgrade, you can use $out, which will effectively replace your existing collection with the aggregation result:
db.users.aggregate([
  {
    $addFields: {
      reputationHistory: {
        $map: {
          input: "$reputationHistory",
          in: {
            reason: { $cond: { if: { $eq: [ "$$this.isBid", true ] }, then: "Prediction", else: "Result" } },
            isBid: "$$this.isBid",
            isResult: "$$this.isResult"
          }
        }
      }
    }
  },
  {
    $out: "users"
  }
])
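Since the question is on MongoDB 3.4.9, a third option that avoids rewriting the whole collection with $out is to compute the new array client-side and write it back per document. A minimal sketch for the mongo shell, assuming the users collection from the question (Object.assign preserves any extra fields, such as the subdocument _id):
const ops = [];
db.users.find({}, { reputationHistory: 1 }).forEach(doc => {
  const reputationHistory = (doc.reputationHistory || []).map(entry =>
    // Copy the entry and add the conditional reason field.
    Object.assign({}, entry, { reason: entry.isBid ? "Prediction" : "Result" })
  );
  ops.push({
    updateOne: {
      filter: { _id: doc._id },
      update: { $set: { reputationHistory: reputationHistory } }
    }
  });
});
if (ops.length > 0) db.users.bulkWrite(ops);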

How to shrink the storage size limit of IPFS?

I am using IPFS and trying to understand its structure.
ISSUE: 1
The issue I am facing is that I am trying to shrink the storage size of IPFS (testing takes a long time, so I just want to shrink the file system for a while), but I am unable to.
I tried to configure the config file by changing the StorageMax limit, using this command:
ipfs config Datastore.StorageMax 1GB
but it still does not work: it keeps accepting files (ipfs add) even after 1GB is crossed. Why is it not limiting the file storage?
How can I reduce the file-system size?
ISSUE: 2
Even with IPFS's default storage limit (which is 10GB), I saw that when I add files to IPFS it does not stop storing them after the 10GB limit is crossed.
It should not go past the limit; what should I do about this?
Below is the config file:
{
"API": {
"HTTPHeaders": {}
},
"Addresses": {
"API": "/ip4/0.0.0.0/tcp/5001",
"Announce": [],
"Gateway": "/ip4/0.0.0.0/tcp/8080",
"NoAnnounce": [],
"Swarm": [
"/ip4/0.0.0.0/tcp/4001",
"/ip6/::/tcp/4001"
]
},
"Bootstrap": [
"/dnsaddr/bootstrap.libp2p.io/ipfs/QmNnooDu7bfjPFoTZYxMWUQJyrVwtbZg5gBMjTezGAJN",
"/dnsaddr/bootstrap.libp2p.io/ipfs/QmQCU2EcMqAqQPR2i9bChGNJchTbq5TbXJJ16u19uLTa",
"/dnsaddr/bootstrap.libp2p.io/ipfs/QmbLHAnMoJPWSCR5Zhtx6BX9KiKNN6tpvbUcqanj75Nb",
"/dnsaddr/bootstrap.libp2p.io/ipfs/QmcZf59bWwK5XFi76CZX8cbBhTzzA3gU1ZjYZcYW3dwt",
"/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEQAwe3N8SzbUtfsmvsqQLuvuJ",
"/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKDZT2MULpjvfd3aZ6ha4oFGL1KrGM",
"/ip4/128.199.219.111/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXDQVcqN88CNLHXMkTNwMKPnu",
"/ip4/104.236.76.40/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZCy3U6aXMJDAbzgu2fzaDs64",
"/ip4/178.62.158.247/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPakiS1J6DifTC88f5uVQKNAd",
"/ip6/2604:a880:1:20::203:d001/tcp/4001/ipfs/QmSoLPppuBtQSGwKDZT2M7Lpjvfd3aZ6ha4oFGL1KrGM",
"/ip6/2400:6180:0:d0::151:6001/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXctVcqN88CNLHXMkTNwMKPnu",
"/ip6/2604:a880:800:10::4a:5001/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64",
"/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd"
],
"Datastore": {
"BloomFilterSize": 0,
"GCPeriod": "1h",
"HashOnRead": false,
"Spec": {
"mounts": [
{
"child": {
"path": "blocks",
"shardFunc": "/repo/flatfs/shard/v1/next-to-last/2",
"sync": true,
"type": "flatfs"
},
"mountpoint": "/blocks",
"prefix": "flatfs.datastore",
"type": "measure"
},
{
"child": {
"compression": "none",
"path": "datastore",
"type": "levelds"
},
"mountpoint": "/",
"prefix": "leveldb.datastore",
"type": "measure"
}
],
"type": "mount"
},
"StorageGCWatermark": 0,
"StorageMax": "1GB"
},
"Discovery": {
"MDNS": {
"Enabled": true,
"Interval": 10
}
},
"Experimental": {
"FilestoreEnabled": false,
"Libp2pStreamMounting": false,
"P2pHttpProxy": false,
"QUIC": false,
"ShardingEnabled": false,
"UrlstoreEnabled": false
},
"Gateway": {
"APICommands": [],
"HTTPHeaders": {
"Access-Control-Allow-Headers": [
"X-Requested-With",
"Range"
],
"Access-Control-Allow-Methods": [
"GET"
],
"Access-Control-Allow-Origin": [
"*"
]
},
"PathPrefixes": [],
"RootRedirect": "",
"Writable": false
},
"Identity": {
"PeerID": "QmfB8xVzjndgMWHuxszYADGrwBo1Zx6zjBQQCsqARDupsW"
},
"Ipns": {
"RecordLifetime": "",
"RepublishPeriod": "",
"ResolveCacheSize": 128
},
"Mounts": {
"FuseAllowOther": false,
"IPFS": "/ipfs",
"IPNS": "/ipns"
},
"Pubsub": {
"DisableSigning": false,
"Router": "",
"StrictSignatureVerification": false
},
"Reprovider": {
"Interval": "12h",
"Strategy": "all"
},
"Routing": {
"Type": "dht"
},
"Swarm": {
"AddrFilters": null,
"ConnMgr": {
"GracePeriod": "20s",
"HighWater": 900,
"LowWater": 600,
"Type": "basic"
},
"DisableBandwidthMetrics": false,
"DisableNatPortMap": false,
"DisableRelay": false,
"EnableRelayHop": false
}
}
It looks like StorageMax does not actually limit the size of the IPFS node; instead, it's used to decide when to run garbage collection. IPFS will happily write until the disk is full.
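To expand on how those knobs interact, a hedged sketch: garbage collection only runs if it is enabled on the daemon, it only removes unpinned blocks, and it is triggered once the repo passes StorageGCWatermark percent of StorageMax (90% by default), so ipfs add itself is never rejected:
# Start the daemon with periodic GC enabled (off by default):
ipfs daemon --enable-gc

# Or trigger a collection by hand; only unpinned blocks are freed:
ipfs repo gc

# GC fires at StorageGCWatermark percent of StorageMax:
ipfs config --json Datastore.StorageGCWatermark 90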

jsonb query in postgres

I have a table in Postgres named op_user_event_data, which has a column named data where I store jsonb. What I have at the moment is JSON like this:
{
"aisles": [],
"taskGroups": [
{
"index": 0,
"tasks": [
{
"index": 1,
"mandatory": false,
"name": "Dados de Linear",
"structuresType": null,
"lines": [
{
"sku": {
"skuId": 1,
"skuName": "Limiano Bola",
"marketId": [
1,
3,
10,
17
],
"productId": 15,
"brandId": [
38,
44
]
},
"taskLineFields": [
{
"tcv": {
"value": "2126474"
},
"columnType": "skuLocalCode",
"columnId": 99
},
{
"tcv": {
"value": null
},
"columnType": "face",
"columnId": 29
},
]
},
{
"sku": {
"skuId": 3,
"skuName": "Limiano Bolinha",
"marketId": [
1,
3,
10,
17
],
"productId": 15,
"brandId": [
38,
44
]
},
"taskLineFields": [
{
"tcv": {
"value": "2545842"
},
"columnType": "skuLocalCode",
"columnId": 99
},
{
"tcv": {
"value": null
},
"columnType": "face",
"columnId": 29
},
]
},
{
"sku": {
"skuId": 5,
"skuName": "Limiano Bola 1/2",
"marketId": [
1,
3,
10,
17
],
"productId": 15,
"brandId": [
38,
44
]
},
"taskLineFields": [
{
"tcv": {
"value": "5127450"
},
"columnType": "skuLocalCode",
"columnId": 99
},
{
"tcv": {
"value": "5.89"
},
"columnType": "rsp",
"columnId": 33
}
]
}
Basically I have an object which has
Aisles [],
taskGroups,
id and name.
Inside taskGroups, as shown in the JSON, one of the attributes is tasks, which is an array; each task also has an array called lines, and each line holds a sku and taskLineFields.
Basically:
taskGroups -> tasks -> lines -> sku or taskLineFields.
I've tried different queries to get the sku, but when I try to get anything deeper than 'lines' it either comes back blank or, in some other tries, fails with 'cannot call elements from scalar'.
Can anyone help me with this issue? Note this is just a sample JSON.
Does anyone know how to make this work?
I want all lines where lines->taskLineFields->columnType = 'offer'.
All I can do is this, but it throws the scalar error:
SELECT lines->'sku' Produto, lines->'taskLineFields'->'tcv'->>'value' ValorOferta
FROM sd_bel.op_user_event_data,
     jsonb_array_elements(data->'taskGroups') taskgroups,
     jsonb_array_elements(taskgroups->'tasks') tasks,
     jsonb_array_elements(tasks->'columns') columns,
     jsonb_array_elements(tasks->'lines') lines
WHERE created_by = 'belteste'
  AND lines->'taskLineFields'->>'columnType' = 'offer'
Say your data is in the data column of your table (it is jsonb, so use the jsonb_* functions):
with t as (
    select data as xyz
    from sd_bel.op_user_event_data
),
tg as (select jsonb_array_elements(xyz->'taskGroups') taskgroups from t),
tsk as (select jsonb_array_elements(taskgroups->'tasks') tasks from tg)
select jsonb_array_elements(tasks->'lines') -> 'sku' as sku
from tsk;
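That gets you to sku, but not to the 'offer' filter. taskLineFields is itself an array, so it has to be unnested too before columnType can be compared, which is likely why the filter in the question matched nothing (also note that tasks->'columns' does not exist in the sample, so that join alone wipes out every row). A hedged sketch against the question's own table and aliases:
SELECT line->'sku' AS Produto,
       tlf->'tcv'->>'value' AS ValorOferta
FROM sd_bel.op_user_event_data d
CROSS JOIN LATERAL jsonb_array_elements(d.data->'taskGroups') AS tg(taskgroup)
CROSS JOIN LATERAL jsonb_array_elements(tg.taskgroup->'tasks') AS t(task)
CROSS JOIN LATERAL jsonb_array_elements(t.task->'lines') AS l(line)
CROSS JOIN LATERAL jsonb_array_elements(l.line->'taskLineFields') AS f(tlf)
WHERE d.created_by = 'belteste'
  AND f.tlf->>'columnType' = 'offer';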

Combining column charts and line charts with the same data in the same container (Highcharts)

I want to build a combination chart: a column chart with multiple series plus a line chart. The problem is that I get the Highcharts data from a nested JSON response. For that I build an array and pass it as the series option, as you can see in the code below.
My code is like this:
var crime_data = [];
for (var i = 0; i < result.themes.length; i++) {
  var crime = {};
  var test2 = result.themes[i];
  var test = test2[Object.keys(test2)];
  crime.name = Object.keys(result.themes[i]);
  crime.data = [];
  for (var k = 0; k < test.yearTheme.length; k++) {
    var test3 = test.yearTheme[k];
    var test5 = test3.individualValueVariable;
    for (var j = 0; j < test5.length; j++) {
      crime.data.push(test5[j].count);
    }
  }
  crime_data.push(crime);
}
var crimeChart = new Highcharts.Chart({
  chart: {
    renderTo: 'container1',
    type: 'column'
  },
  title: {
    text: 'Crime'
  },
  xAxis: {
    categories: month,
    crosshair: true
  },
  yAxis: {
    min: 0,
    title: {
      text: 'Count'
    }
  },
  credits: {
    enabled: false
  },
  tooltip: {
    shared: true
  },
  plotOptions: {
    column: {
      pointPadding: 0.2,
      borderWidth: 0,
      depth: 25,
      allowPointSelect: true,
      cursor: 'pointer',
      point: {}
    }
  },
  series: crime_data
});
This is the column chart I get when I set the chart type to column.
This is the line chart I get when I change the type from column to spline.
And this is my JSON data (Highcharts data):
{
"boundaries": {
"boundary": [
{
"boundaryId": "55083021003",
"boundaryType": "USA_CITY",
"boundaryRef": "C1"
}
]
},
"themes": [
{
"AssaultCrimeTheme": {
"boundaryRef": "C1",
"individualValueVariable": [
{
"name": "2013 Assault Crime",
"description": "Assault Crime for 2013",
"count": 18901
},
{
"name": "2014 Assault Crime",
"description": "Assault Crime for 2014",
"count": 17707
}
]
}
},
{
"BurglaryCrimeTheme": {
"boundaryRef": "C1",
"individualValueVariable": [
{
"name": "2013 Burglary Crime",
"description": "Burglary Crime for 2013",
"count": 17743
},
{
"name": "2014 Burglary Crime",
"description": "Burglary Crime for 2014",
"count": 14242
}
]
}
}
]
}
I want to combine both of them in the same container with the same data. The problem is how to tell Highcharts that multiple series should be represented with the line type and with the column type using the same data. When I write series: [{ data: crime_data, type: 'spline' }] instead of series: crime_data, I do not get the Highcharts data at all. Can anyone please help me with how to do this?
Pass your data in the format below: add the type of chart to each data series.
Here I alternated the type value but kept the same data.
[{
  type: 'line',
  name: 'AssaultCrimeTheme',
  data: [3, 2, 1, 3, 4]
}, {
  type: 'line',
  name: 'BurglaryCrimeTheme',
  data: [2, 3, 5, 7, 6]
}, {
  type: 'column',
  name: 'AssaultCrimeTheme',
  data: [3, 2, 1, 3, 4]
}, {
  type: 'column',
  name: 'BurglaryCrimeTheme',
  data: [2, 3, 5, 7, 6]
}]
Here is a complete example using your data.
const json = {
  "boundaries": {
    "boundary": [{
      "boundaryId": "55083021003",
      "boundaryType": "USA_CITY",
      "boundaryRef": "C1"
    }]
  },
  "themes": [{
    "AssaultCrimeTheme": {
      "boundaryRef": "C1",
      "individualValueVariable": [{
        "name": "2013 Assault Crime",
        "description": "Assault Crime for 2013",
        "count": 18901
      }, {
        "name": "2014 Assault Crime",
        "description": "Assault Crime for 2014",
        "count": 17707
      }]
    }
  }, {
    "BurglaryCrimeTheme": {
      "boundaryRef": "C1",
      "individualValueVariable": [{
        "name": "2013 Burglary Crime",
        "description": "Burglary Crime for 2013",
        "count": 17743
      }, {
        "name": "2014 Burglary Crime",
        "description": "Burglary Crime for 2014",
        "count": 14242
      }]
    }
  }]
}

// Create a categories object in order to filter duplicates
const cats = {}
const series = json.themes.map((o) => {
  const key = Object.keys(o)[0]
  return {
    name: key,
    data: o[key].individualValueVariable.map((o) => {
      cats[o.name] = 1
      return { category: o.name, y: o.count }
    })
  }
})

// Convert the categories object to an array
const categories = Object.keys(cats)

// Chart options
const options = {
  chart: {type: 'column'},
  xAxis: {categories: categories},
  series: series
}

// Create chart
const chart = Highcharts.chart('container', options)
console.log(series, categories)
Live example: https://jsfiddle.net/Lo323gq3/
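To get the column/line combination the question asks for, one hedged tweak to the example above is to emit each theme twice with an explicit per-series type (the ' (trend)' suffix is just an invented label to keep the legend entries distinct):
// Render each theme twice: once as columns, once as a spline over the same data.
const combined = series.flatMap((s) => [
  Object.assign({}, s, { type: 'column' }),
  Object.assign({}, s, { type: 'spline', name: s.name + ' (trend)' })
])

Highcharts.chart('container', {
  xAxis: { categories: categories },
  series: combined
})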
