Within MongoDB we can build a data-processing (aggregation) pipeline that is executed against our data in a single round trip to the server. The example below uses the driver's facet support to fetch one page of results and the total document count in the same query.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;

public class Person
{
    [BsonId]
    [BsonRepresentation(BsonType.String)]
    public string Id { get; set; }
    public string FirstName { get; set; }
    public string Surname { get; set; }
}

public class Pager
{
    public int Count { get; set; }      // total number of pages
    public int Page { get; set; }       // current page number
    public int Size { get; set; }       // page size
    public IEnumerable<Person> Items { get; set; }
}
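To make the shape concrete: with 23 matching documents and a page size of 5, a request for page 2 comes back as Count = 5 (the last page holding the remaining 3 documents, given the ceiling arithmetic used below), Size = 5, Page = 2, and Items containing documents 6–10 in surname order.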
class Program
{
    static async Task Main(string[] args)
    {
        var client = new MongoClient();
        var database = client.GetDatabase("pager_test");
        var collection = database.GetCollection<Person>(nameof(Person));

        int page = 1;
        int pageSize = 5;
        var results = await GetPagerResultAsync(page, pageSize, collection);
    }
    private static async Task<Pager> GetPagerResultAsync(int page, int pageSize, IMongoCollection<Person> collection)
    {
        // Count facet: a pipeline whose only stage is $count, giving the total number of matching documents.
        var countFacet = AggregateFacet.Create("countFacet",
            PipelineDefinition<Person, AggregateCountResult>.Create(new[]
            {
                PipelineStageDefinitionBuilder.Count<Person>()
            }));
        // Data facet: sort the data, then skip and limit the results to produce just the requested page.
        var dataFacet = AggregateFacet.Create("dataFacet",
            PipelineDefinition<Person, Person>.Create(new[]
            {
                PipelineStageDefinitionBuilder.Sort(Builders<Person>.Sort.Ascending(x => x.Surname)),
                PipelineStageDefinitionBuilder.Skip<Person>((page - 1) * pageSize),
                PipelineStageDefinitionBuilder.Limit<Person>(pageSize),
            }));
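        // For orientation, the pipeline the driver sends to the server should look
        // roughly like this (my reading of the builders above, not verbatim driver output):
        //   [ { $match: {} },
        //     { $facet: { countFacet: [ { $count: "count" } ],
        //                 dataFacet:  [ { $sort: { Surname: 1 } }, { $skip: <n> }, { $limit: <n> } ] } } ]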
        var filter = Builders<Person>.Filter.Empty;

        var aggregation = await collection.Aggregate()
            .Match(filter)
            .Facet(countFacet, dataFacet)
            .ToListAsync();

        var count = aggregation.First()
            .Facets.First(x => x.Name == "countFacet")
            .Output<AggregateCountResult>()
            ?.FirstOrDefault()
            ?.Count ?? 0;

        var data = aggregation.First()
            .Facets.First(x => x.Name == "dataFacet")
            .Output<Person>();
        return new Pager
        {
            // Math.Ceiling so a final partial page still counts as a page
            // (plain integer division would under-report, e.g. 7 docs / 5 per page => 1 instead of 2).
            Count = (int)Math.Ceiling(count / (double)pageSize),
            Size = pageSize,
            Page = page,
            Items = data
        };
    }
}
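As a quick way to exercise the pager, a seeding helper along the following lines could be added to Program and awaited at the start of Main; the helper name and sample surnames here are illustrative assumptions, not part of the original example.

private static async Task SeedAsync(IMongoCollection<Person> collection)
{
    // Only seed once; skip if the collection already holds documents.
    if (await collection.CountDocumentsAsync(Builders<Person>.Filter.Empty) > 0)
        return;

    // Sample surnames chosen purely for illustration.
    var people = new[] { "Adams", "Baker", "Clark", "Davis", "Evans", "Frost", "Grant" }
        .Select(surname => new Person
        {
            Id = Guid.NewGuid().ToString(),
            FirstName = "Test",
            Surname = surname
        });

    await collection.InsertManyAsync(people);
}

With these seven documents and pageSize = 5, page 1 returns five people, page 2 returns the remaining two, and Count comes back as 2.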