MongoDB: update nested array by key-value

How can I update a nested array using a list of key-value pairs?
{
  "_id": "mainId",
  "events": [
    { "id": 1, "profile": 10 },
    { "id": 2, "profile": 10 },
    { "id": 3, "profile": 20 }
  ]
}
and I have a list to update:
var list = [{id: 2, newval: 222}, {id: 3, newval: 333}];
How can I do the update in one query? Or will it have to be a loop in MongoDB, like this?
for (const {id, newval} of list) {
  update({_id: "mainId", "events.id": id}, {$set: {"events.$.profile": newval}})
}

If you have a copy of the events array, you could apply the updates in your code and then send the whole updated array to MongoDB in a single query. Something like this:
db.Test.updateOne({_id: "mainId"}, {$set: { "events": [{id: 1, profile: 10}, {id: 2, profile: 222}, {id: 3, profile: 333}] } })
If you don't have a copy of the events array, you could do a bulk operation. Something like:
db.Test.bulkWrite([
  { updateOne: {
      "filter": {_id: "mainId", "events.id": 2},
      "update": { $set: { "events.$.profile": 222 } }
  } },
  { updateOne: {
      "filter": {_id: "mainId", "events.id": 3},
      "update": { $set: { "events.$.profile": 333 } }
  } }
])
For more on bulkWrite, see the MongoDB docs: https://docs.mongodb.com/manual/core/bulk-write-operations/#bulkwrite-methods
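If the list of updates is built at runtime, you could also construct the bulk operations from it before sending them; a minimal sketch in the shell, assuming the {id, newval} shape from the question:
var list = [{id: 2, newval: 222}, {id: 3, newval: 333}];
var ops = list.map(function (item) {
  return { updateOne: {
    filter: { _id: "mainId", "events.id": item.id },
    update: { $set: { "events.$.profile": item.newval } }
  } };
});
db.Test.bulkWrite(ops); // one request, one update per list entry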

Related

How to Update Array dict Elements in mongodb based on another field

How can I update a value in a document based on applying functions to another field (which is in a different embedded document)?
With the sample data below, I want to
get the col field for the farm having id 12
multiply that by 0.025
add the current value of the statistic.crypt field
ensure the value is a double by converting it with $toDouble
store the result back into statistic.crypt
data:
{
"_id": {
"$oid": "6128c238c144326c57444227"
},
"statistic": {
"balance": 112570,
"diamond": 14,
"exp": 862.5,
"lvl": 76,
"mn_exp": 2.5,
"lvl_mn_exp": 15,
"coll_ms": 8047,
"all_exp": 67057.8,
"rating": 0,
"crypt": 0
},
"inventory": {
"farm": [{
"id": 12,
"col": 100,
"currency": "diamond",
"cost": 2,
"date": "2021-09-02 18:58:39"
}, {
"id": 14,
"col": 1,
"currency": "diamond",
"cost": 2,
"date": "2021-09-02 16:57:08"
}],
"items": []
},
...
}
My initial attempt is:
self.collection.update_many({"inventory.farm.id": 12}, [{
    "$set": {
        "test": {
            "$toDouble": {
                "$sum": [
                    {"$multiply": ["$inventory.farm.$[].col", 0.025]},
                    "$test"
                ]
            }
        }
    }
}])
This does not work as it applies to test rather than statistic.crypt, and I cannot figure out how to modify it to apply to statistic.crypt.
A field can be updated based on another in the following stages:
add a field containing the farm
set statistic.crypt to the result of the mathematical expression (applied to the newly embedded farm)
remove extra fields
In code:
self.collection.update_many({"inventory.farm.id": 12}, [
    {
        # embed the matching farm entry in a temporary field
        "$addFields": {
            "hh": {
                "$filter": {
                    "input": "$inventory.farm",
                    "as": "z",
                    "cond": {"$eq": ["$$z.id", 12]},
                }
            }
        }
    },
    {
        # col * 0.025 + current statistic.crypt, coerced to a double
        "$set": {
            "statistic.crypt": {
                "$toDouble": {
                    "$sum": [
                        {"$multiply": [{"$first": "$hh.col"}, 0.025]},
                        "$statistic.crypt",
                    ]
                }
            }
        }
    },
    {
        # keep only the original fields, dropping the temporary hh
        "$project": {
            "id_pr": 1,
            "id_server": 1,
            "role": 1,
            "warns": 1,
            "id_clan": 1,
            "statistic": 1,
            "design": 1,
            "date": 1,
            "inventory": 1,
            "voice": 1,
        }
    },
])
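With the sample data above, the farm with id 12 has col = 100, so statistic.crypt goes from 0 to 0 + 100 * 0.025 = 2.5.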

How to perform update query on deeply nested JSON in mongodb?

I have a nested JSON data structure in MongoDB which looks like:
{
'tid': 1,
'matches': [{
'dord': 1,
'matches': [{
'tord': 1,
'score': 11
},
{
'tord': 2,
'score': 12
}
]
},
{
'dord': 2,
'matches': [{
'tord': 1,
'score': 21
},
{
'tord': 2,
'score': 22
}
]
}]
}
I want to update the row with "dord": 1 and "tord": 1 and change value of score from 11 to 100. How do I do this?
What I already tried:
db.collection.update({'tid': 1}, {'matches': {$elemMatch: {'dord': 1}}}, {'matches': { $elemMatch: {'tord': 1}}}, {'score': 100})
Demo - https://mongoplayground.net/p/Mi2HnhzkPpE
https://docs.mongodb.com/manual/reference/operator/update/positional-filtered/
The filtered positional operator $[<identifier>] identifies the array elements that match the arrayFilters conditions for an update operation.
db.collection.update({ "tid": 1 },
{ $set: { "matches.$[m].matches.$[t].score": 100 } },
{
arrayFilters: [
{ "m.dord": 1 }, // to match where dord = 1
{ "t.tord": 1, "t.score": 11 } // and where tord = 1 and score = 11
]
})
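With the sample document above, this changes the score of the element at dord 1 / tord 1 from 11 to 100 and leaves the other nested elements untouched.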

How to normalize paginated data?

I need to convert data like this:
{peopleList: [{id:1, name: 'joe'}, {id: 2, name: 'john'}], page: 1, rowPerPage: 8}
to this model:
{entities: {'0': {id: 0, name: 'joe'}, '1': {id: 1, name: 'john'}, page: 1, rowPerPage: 8}, result: [0, 1]}
but when I add this schema:
const people = new schema.Entity('peopleList');
const normalizedData = normalize(_data, { peopleList: [people] });
I get this output:
{
"entities": {
"peopleList": {
"1": {
"id": 1,
"name": "joe"
},
"2": {
"id": 2,
"name": "john"
}
}
},
"result": {
"peopleList": [
1,
2
],
"page": 1,
"rowPerPage": 8
}
}
I don't know exactly how to build a schema that produces the result field the way I want. Maybe the correct approach is to keep it under result and this output is already correct. Any idea?
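Not a definitive answer, but one option is to normalize only the array itself, so result becomes a flat list of ids, and then attach page and rowPerPage yourself; a sketch using the same entity schema:
import { schema, normalize } from 'normalizr';

const people = new schema.Entity('peopleList');
const { entities, result } = normalize(_data.peopleList, [people]);
const normalizedData = {
  // entities keyed by id: { 1: { id: 1, name: 'joe' }, 2: { id: 2, name: 'john' } }
  entities: { ...entities.peopleList, page: _data.page, rowPerPage: _data.rowPerPage },
  result, // [1, 2]
};
Note that this keys the entities by their existing ids (1, 2) rather than re-indexing them from 0 as in the desired model; if you really need 0-based keys you would have to remap them afterwards.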

How to loop array after process groupBy in React-Native

I want to group JSON data (One JSON) by region, so that after grouping it looks like Two JSON. I then use Two JSON to show data in the front end as in Result JSON. So, how do I loop over the regions after grouping, so that I can render the data like Result JSON?
==> One JSON
data: [
  { id: 1, status: "active", dataDetail: { id: 5, name: "tes 1", region: "aaa" } },
  { id: 2, status: "active", dataDetail: { id: 8, name: "tes 2", region: "bbb" } },
  { id: 3, status: "active", dataDetail: { id: 8, name: "tes 3", region: "aaa" } }
]
==> Two JSON
aaa: [
  { id: 1, status: "active", dataDetail: { id: 5, name: "tes 1", region: "aaa" } },
  { id: 3, status: "active", dataDetail: { id: 8, name: "tes 3", region: "aaa" } }
],
bbb: [
  { id: 2, status: "active", dataDetail: { id: 8, name: "tes 2", region: "bbb" } }
]
==> Result JSON
aaa:
1
3
bbb:
2
thanks
Using Lodash:
const jsonTwo = _.groupBy(data, instance => instance.dataDetail.region);
const resultJson = _.mapValues(jsonTwo, regionInstances => regionInstances.map(instance => instance.id));
Using plain javascript reduce functions:
const jsonTwo = data.reduce((accumulator, instance) => {
  if (!accumulator[instance.dataDetail.region]) {
    accumulator[instance.dataDetail.region] = [];
  }
  accumulator[instance.dataDetail.region].push(instance);
  return accumulator;
}, {});
const resultJson = data.reduce((accumulator, instance) => {
  if (!accumulator[instance.dataDetail.region]) {
    accumulator[instance.dataDetail.region] = [];
  }
  accumulator[instance.dataDetail.region].push(instance.id);
  return accumulator;
}, {});
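For the sample data above, jsonTwo groups the full objects by region (the Two JSON shape) and resultJson comes out as { aaa: [1, 3], bbb: [2] }, which matches the Result JSON.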
var data =
[
{
"id": 1,
"status": "active",
"dataDetail": {
"id": 5,
"name": "tes 1",
"region": "aaa"
}
},
{
"id": 2,
"status": "active",
"dataDetail": {
"id": 8,
"name": "tes 2",
"region": "bbb"
}
},
{
"id": 3,
"status": "active",
"dataDetail": {
"id": 8,
"name": "tes 3",
"region": "aaa"
}
}
];
const groups = _.chain(data).groupBy('dataDetail.region');
const keys = groups.map((value, key) => key).value();
const values = groups.map((value) => _.map(value, 'id')).value();
const result = _.zipObject(keys, values); // { aaa: [1, 3], bbb: [2] }

Mongoose-MongoDb : doc.pull inconsistent when multiple pull

node v7.7.1
mongodb: 2.2.33,
mongoose: 4.13.7
Hello all,
I'm having unexpected behaviour when trying to update a document with multiple pulls based on matching criteria. Here is what I mean.
My document schema looks like this:
{
"_id": "5a1c0c37d1c8b6323860dfd0",
"ID": "1511781786844",
"main": {
"_id": "5a3c37bfc065e86a5c593967",
"plan": [
{
"field1": 1,
"field2": 1,
"_id": "5a3c30dfa479bb4b5887e56e",
"child": []
},
{
"field1": 1,
"field2": 2,
"_id": "5a3c30e1a479bb4b5887e5c",
"child": []
},
{
"field1": 1,
"field2": 3,
"_id": "5a3c37bfc065e86a5c593968",
"child": []
},
{
"field1": 1,
"field2": 4,
"_id": "5a3c37bfc065e86a5c593655",
"child": []
},
{
"field1": 1,
"field2": 5,
"_id": "5a3c30dfa479bb4b5887e56f",
"child": []
},
{
"field1": 1,
"field2": 6,
"_id": "5a3c30e1a479bb4b6887e545",
"child": []
},
{
"field1": 1,
"field2": 7,
"_id": "5a3c37bfc065e86a5c5939658",
"child": []
},
{
"field1": 2,
"field2": 2,
"_id": "5a3c37bfc065e86a5c593963",
"child": []
},
]
},
...
....
}
and this is my code to update the document:
Schema.findOne({ID: data.ID})
  .then(function(doc) {
    var array = doc.main.plan;
    for (var i = 0; i < array.length; i++) {
      if (array[i].field1 === 1) {
        var id = array[i]._id;
        console.log('pulling');
        array.pull({ _id: id });
      }
    }
    doc.save().then(function(doc) {
      console.log('saving');
      // console.log(doc);
      if (doc && doc.docID) {
        return { success: true };
      } else {
        return { success: false, error: 'unknownError' };
      }
    });
  });
Now the issue: let's say my array has 7 objects that match the test (array[i].field1 === 1). When I run this and check the logs, I see that it basically pulls half of the objects and then saves,
so I would get
pulling
pulling
pulling
pulling
saving
and then I have to run the code again for the remaining 3 objects in the array and get
pulling
pulling
saving
So I have to run it a third time to completely clear the array.
I need help getting this working.
Thank you.
So I created a little workaround: a recursive function that pulls all matching elements in one go, using lodash. Not pretty, but it does the job.
// here `doc` is the document array (e.g. doc.main.plan) and
// `cond` is, for example, { field: 'field1', value: 1 }
const delObjArray = (doc, cond) => {
  const checkField = cond.field;
  const checkVal = cond.value;
  _.forEach(doc, (value) => {
    if (value && value[checkField] === checkVal) {
      doc.pull({ _id: value._id });
    }
  });
  // pulling while iterating skips elements, so recurse until nothing matches
  const isFound = _.some(doc, { [checkField]: checkVal });
  if (isFound) {
    delObjArray(doc, cond);
  }
  return true;
};
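The underlying problem is that pulling elements while iterating the same array forward shifts the remaining elements, so every other match gets skipped. A simpler alternative (a sketch, assuming the same schema and a match on field1 === 1) is to collect the matching ids first and pull afterwards:
Schema.findOne({ ID: data.ID }).then(function (doc) {
  // collect the matching ids first, so the array is not mutated while iterating it
  const ids = doc.main.plan
    .filter(function (item) { return item.field1 === 1; })
    .map(function (item) { return item._id; });
  ids.forEach(function (id) { doc.main.plan.pull({ _id: id }); });
  return doc.save();
});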
