Reputation: 3417
So I have some data which looks like this:
[
{
"_id": "5ba41d8c5f60a647fc792c28",
"key": "CPU Usage",
"signaler": "lookup",
"time": "2018-09-20T22:22:04.515Z",
"status": "alarm"
},
{
"_id": "5ba41d8c5f60a647fc792c2a",
"key": "NETWORK Usage",
"signaler": "engine",
"time": "2018-09-20T22:22:04.516Z",
"status": "warning"
},
{
"_id": "5ba41d8c5f60a647fc792c29",
"key": "NETWORK Usage",
"signaler": "engine",
"time": "2018-09-09T22:22:04.516Z",
"status": "alarm"
},
{
"_id": "5ba41d8c5f60a647fc792c2d",
"key": "CPU Usage",
"signaler": "evaluator",
"time": "2018-09-09T22:22:04.840Z",
"status": "alarm"
},
{
"_id": "5ba41d8c5f60a647fc792c2b",
"key": "RAM Usage",
"signaler": "engine",
"time": "2018-09-01T22:22:04.840Z",
"status": "alarm"
}
]
key
and signaler
can be any strings, status
must be one of alarm
, warning
or normal
.
I want to write an aggregation which groups on signaler
+ key
and tells me the total alarms and warnings for 3 durations: all time, last week and last month.
Expected output:
[
{
"_id": {
"signaler": "lookup",
"key": "CPU Usage"
},
"alarmsWeek": 1,
"warningsWeek": 0,
"alarmsMonth": 1,
"warningsMonth": 0,
"alarmsAllTime": 1,
"warningsAllTime": 0
},
{
"_id": {
"signaler": "engine",
"key": "Network Usage"
},
"alarmsWeek": 0,
"warningsWeek": 1,
"alarmsMonth": 1,
"warningsMonth": 1,
"alarmsAllTime": 1,
"warningsAllTime": 1
},
{
"_id": {
"signaler": "evaluator",
"key": "CPU Usage"
},
"alarmsWeek": 0,
"warningsWeek": 0,
"alarmsMonth": 1,
"warningsMonth": 0,
"alarmsAllTime": 1,
"warningsAllTime": 0
},
{
"_id": {
"signaler": "engine",
"key": "RAM Usage"
},
"alarmsWeek": 0,
"warningsWeek": 0,
"alarmsMonth": 0,
"warningsMonth": 0,
"alarmsAllTime": 1,
"warningsAllTime": 0
}
]
I know how to write the group stage which calculates the warnings and alarms for all time but I'm not sure how to do the time bucketing, especially since they are 'stacked' i.e the counts of last week will also be in the counts for last month.
The group step as far I understand:
[
{
"$group": {
"_id": {
"signaler":"$signaler",
"key": "$key"
},
"totalWarnings": {
"$sum": {
"$cond": [
{"$eq": [ "warning", "$level" ] },
1,
0
]
}
},
"totalAlarms": {
"$sum": {
"$cond": [
{"$eq": [ "alarm", "$level" ] },
1,
0
]
}
}
}
},
{
"$project": {
"_id": { "$concat": ["$_id.key", "+", "$_id.signaler"] },
"key": "$_id.key",
"signaler": "$_id.signaler",
"totalAlarms": 1,
"totalWarnings": 1
}
}
]
Upvotes: 0
Views: 195
Reputation: 4363
To illustrate Fanamy's answer, here's the group stage used to achieve this :
db.collection.aggregate([
{ "$group": {
"_id": { "signaler": "$signaler", "key": "$key" },
"alarmsWeek": {
"$sum": {
"$cond": {
"if": {
"$and": [
{ "$eq": ["$status", "alarm"] },
{ "$lt": ["$time", { "$subtract": [new Date(), { "$multiply": [1000, 3600, 24, 7] }] }] }
]
},
"then": 1,
"else": 0
}
}
},
"warningsWeek": {
"$sum": {
"$cond": {
"if": {
"$and": [
{ "$eq": ["$status", "warning"] },
{ "$lt": ["$time", { "$subtract": [new Date(), { "$multiply": [1000, 3600, 24, 7] }] }] }
]
},
"then": 1,
"else": 0
}
}
},
"alarmsMonth": {
"$sum": {
"$cond": {
"if": {
"$and": [
{ "$eq": ["$status", "alarm"] },
{ "$lt": ["$time", { "$subtract": [new Date(), { "$multiply": [1000, 3600, 24, 30] }] }] }
]
},
"then": 1,
"else": 0
}
}
},
"warningsMonth": {
"$sum": {
"$cond": {
"if": {
"$and": [
{ "$eq": ["$status", "warning"] },
{ "$lt": ["$time", { "$subtract": [new Date(), { "$multiply": [1000, 3600, 24, 30] }] }] }
]
},
"then": 1,
"else": 0
}
}
},
"alarmsAllTime": {
"$sum": { "$cond": { "if": { "$eq": ["$status", "alarm"] }, "then": 1, "else": 0 }}
},
"warningsAllTime": {
"$sum": { "$cond": { "if": { "$eq": ["$status", "warning"] }, "then": 1, "else": 0 }}
}
}}
])
Upvotes: 1
Reputation: 51
for the time buckets you could use https://docs.mongodb.com/manual/reference/method/Date/ in combination with $subtract (as it supports dates) https://docs.mongodb.com/manual/reference/operator/aggregation/subtract/
so simply take the current Date, subtract 7 days (or 30 for a month — it wouldn't be exactly "month-correct") and check if the date of the document is greater than or equal to your computed cutoff date, i.e. the document falls within the window.
Upvotes: 0