Reputation: 783
I have some data stored in a MongoDB collection. The "tags" field is currently an array of Strings, but we have to update our model to store more data with those tags.
Current model document
{
    "id" : "59e4aefd74f12800019ba565",
    "title" : "This is a title",
    "tags" : [
        "59b02e6f6b28ce0001f8c0a8",
        "59b031886b28ce0001f8c0af",
        "59ba8c1a5047570001a3c078"
    ]
}
Desired model after update
{
    "id" : "59e4aefd74f12800019ba565",
    "title" : "This is a title",
    "tags" : [
        {
            "id" : "5a82ff1d889a15000103b115",
            "internalName" : "Día Mundial de la Television"
        },
        {
            "id" : "59ba8c1a5047570001a3c078",
            "internalName" : "menu"
        },
        {
            "id" : "5a26ac73d0fc2e00017f286e",
            "internalName" : "oid_asdf_asd"
        }
    ]
}
Now each tag is an embedded object (forget about the internalName field). How can I update the tags field without losing the existing data? I've tried $rename, but it doesn't work well with arrays:
db.test.update({}, {$rename: {'tags': 'tags2.id'}})
Upvotes: 1
Views: 159
Reputation: 103435
Using the concepts from this very good answer, you could create a cursor from an aggregate operation that transforms the tags arrays with the $map operator, then iterate the cursor and update your collection using bulkWrite.
The aggregate operation follows:
var cursor = db.test.aggregate([
    {
        "$project": {
            "tags": {
                "$map": {                 // transform each element of the tags array
                    "input": "$tags",
                    "as": "el",
                    "in": {
                        "id": "$$el",     // keep the original tag string as the new id
                        "internalName": { "$literal": "temp string" }  // placeholder value
                    }
                }
            }
        }
    }
]);
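For the first document above, the cursor would yield something like this (each tag string becomes an embedded object, with internalName set to the placeholder):
{
    "_id" : "59e4aefd74f12800019ba565",
    "tags" : [
        { "id" : "59b02e6f6b28ce0001f8c0a8", "internalName" : "temp string" },
        { "id" : "59b031886b28ce0001f8c0af", "internalName" : "temp string" },
        { "id" : "59ba8c1a5047570001a3c078", "internalName" : "temp string" }
    ]
}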
And running the bulk update:
var bulkUpdateOps = [];
cursor.forEach(doc => {
    const { _id, tags } = doc;
    bulkUpdateOps.push({
        "updateOne": {
            "filter": { _id },
            "update": { "$set": { tags } },
            "upsert": true
        }
    });
    if (bulkUpdateOps.length === 1000) {  // flush in batches of 1000 operations
        db.test.bulkWrite(bulkUpdateOps);
        bulkUpdateOps = [];
    }
});

if (bulkUpdateOps.length > 0) {           // flush any remaining operations
    db.test.bulkWrite(bulkUpdateOps);
}
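As a side note, if your server is on MongoDB 4.2 or newer, an update with an aggregation pipeline can apply the same $map transform in a single server-side operation, with no cursor or bulkWrite needed. A minimal sketch, assuming the same test collection and the same placeholder internalName:
db.test.updateMany(
    // only touch documents whose tags are still plain strings,
    // so re-running the migration is safe
    { "tags.0": { "$type": "string" } },
    [
        {
            "$set": {
                "tags": {
                    "$map": {
                        "input": "$tags",
                        "as": "el",
                        "in": {
                            "id": "$$el",
                            "internalName": { "$literal": "temp string" }
                        }
                    }
                }
            }
        }
    ]
);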
Upvotes: 1