Database

db={"workspaces":[{"_id":"w1","workspace":"workspaceId1"},{"_id":"w2","workspace":"workspaceId2"}],"projects":[{"_id":"p1","workspace":"workspaceId1","deleted":false},{"_id":"p2","workspace":"workspaceId1","deleted":true},{"_id":"p3","workspace":"workspaceId2","deleted":false}],"items":[{"_id":"i1","project":"p1","type":"type1","deleted":false},{"_id":"i2","project":"p1","type":"type1","deleted":true},{"_id":"i3","project":"p1","type":"type2","deleted":false},{"_id":"i4","project":"p2","type":"type1","deleted":false},{"_id":"i5","project":"p3","type":"type1","deleted":false},{"_id":"i6","project":"p3","type":"type3","deleted":true},{"_id":"i7","project":"p1","type":"type3","deleted":false}]}

Query

// Per-project item counts by type for one workspace.
//
// Fixes over the previous version:
// 1. Soft-deleted projects are now excluded: items were already filtered on
//    `deleted`, but projects were not, so a deleted project (e.g. "p2")
//    leaked into the counts.
// 2. Projects with zero non-deleted items now appear with all-zero counts:
//    a plain $unwind silently dropped them from the result.
db.projects.aggregate([
  // Only non-deleted projects of the requested workspace.
  { $match: { workspace: "workspaceId1", deleted: false } },
  {
    $lookup: {
      from: "items",
      let: { id: "$_id" },
      pipeline: [
        {
          $match: {
            $expr: {
              $and: [
                { $eq: ["$project", "$$id"] },
                { $eq: ["$deleted", false] },
              ],
            },
          },
        },
        /*
         * Project only the fields needed by later stages, to not overload
         * memory and to not get an `exceeded memory limit for $group` error.
         * (`deleted` is always false here, so it is no longer projected.)
         */
        { $project: { _id: 1, type: 1 } },
      ],
      as: "items",
    },
  },
  /*
   * Use $unwind here to optimize the aggregation pipeline, see:
   * https://stackoverflow.com/questions/45724785/aggregate-lookup-total-size-of-documents-in-matching-pipeline-exceeds-maximum-d
   * Without $unwind we may get a `matching pipeline exceeds maximum document
   * size` error. The error appears not in all requests and is hard to debug.
   *
   * preserveNullAndEmptyArrays keeps projects whose lookup matched nothing;
   * for those, `$items` is missing and every $cond below yields 0.
   */
  { $unwind: { path: "$items", preserveNullAndEmptyArrays: true } },
  {
    $group: {
      _id: "$_id",
      itemType1Count: {
        $sum: { $cond: { if: { $eq: ["$items.type", "type1"] }, then: 1, else: 0 } },
      },
      itemType2Count: {
        $sum: { $cond: { if: { $eq: ["$items.type", "type2"] }, then: 1, else: 0 } },
      },
      itemType3Count: {
        $sum: { $cond: { if: { $eq: ["$items.type", "type3"] }, then: 1, else: 0 } },
      },
    },
  },
]);

Result