chore(http): unify the pagination of the index and documents route behind a common type

commit 12b5eabd5d
parent d6dd234914
Author: Tamo
Date:   2022-06-02 13:31:46 +02:00
Signed with GPG key ID 20CD8020AFA88D69 (no known key found for this signature in database)

4 changed files with 100 additions and 35 deletions

meilisearch-http/src/routes/indexes/documents.rs

@@ -14,7 +14,7 @@ use mime::Mime;
 use once_cell::sync::Lazy;
 use serde::Deserialize;
 use serde_cs::vec::CS;
-use serde_json::{json, Value};
+use serde_json::Value;
 use tokio::sync::mpsc;
 
 use crate::analytics::Analytics;
@@ -22,7 +22,7 @@ use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::payload::Payload;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::{fold_star_or, StarOr};
+use crate::routes::{fold_star_or, PaginationView, StarOr};
 use crate::task::SummarizedTaskView;
 
 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
@@ -122,14 +122,12 @@ pub async fn delete_document(
     Ok(HttpResponse::Accepted().json(task))
 }
 
-const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
-
 #[derive(Deserialize, Debug)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct BrowseQuery {
     #[serde(default)]
     offset: usize,
-    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    #[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")]
     limit: usize,
     fields: Option<CS<StarOr<String>>>,
 }
@@ -141,8 +139,8 @@ pub async fn get_all_documents(
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
     let BrowseQuery {
-        offset,
         limit,
+        offset,
         fields,
     } = params.into_inner();
     let attributes_to_retrieve = fields.map(CS::into_inner).and_then(fold_star_or);
@@ -151,10 +149,10 @@ pub async fn get_all_documents(
         .documents(path.into_inner(), offset, limit, attributes_to_retrieve)
         .await?;
 
-    debug!("returns: {:?}", documents);
-    Ok(HttpResponse::Ok().json(json!(
-        { "limit": limit, "offset": offset, "total": total, "results": documents }
-    )))
+    let ret = PaginationView::new(offset, limit, total as usize, documents);
+
+    debug!("returns: {:?}", ret);
+    Ok(HttpResponse::Ok().json(ret))
 }
 
 #[derive(Deserialize, Debug)]
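For illustration: the envelope the documents route now returns, as a minimal self-contained sketch. The struct is a trimmed copy of the PaginationView added below in src/routes/mod.rs, and the document values are invented.

use serde::Serialize;

// Trimmed copy of PaginationView (the real one also derives Debug and Clone).
#[derive(Serialize)]
struct PaginationView<T> {
    results: Vec<T>,
    offset: usize,
    limit: usize,
    total: usize,
}

fn main() {
    // Invented documents; the route fills `results` from the index instead.
    let ret = PaginationView {
        results: vec!["doc1", "doc2"],
        offset: 0,
        limit: 20,
        total: 2,
    };
    // Prints the same shape the old json! literal produced by hand:
    // {"results":["doc1","doc2"],"offset":0,"limit":20,"total":2}
    println!("{}", serde_json::to_string(&ret).unwrap());
}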

meilisearch-http/src/routes/indexes/mod.rs

@@ -12,6 +12,8 @@ use crate::extractors::authentication::{policies::*, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::task::SummarizedTaskView;
 
+use super::Pagination;
+
 pub mod documents;
 pub mod search;
 pub mod settings;
@@ -37,38 +39,22 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }
 
-const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
-
-#[derive(Deserialize, Debug)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct Paginate {
-    #[serde(default)]
-    offset: usize,
-    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
-    limit: usize,
-}
-
 pub async fn list_indexes(
     data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
-    paginate: web::Query<Paginate>,
+    paginate: web::Query<Pagination>,
 ) -> Result<HttpResponse, ResponseError> {
     let search_rules = &data.filters().search_rules;
     let indexes: Vec<_> = data.list_indexes().await?;
     let nb_indexes = indexes.len();
-    let indexes: Vec<_> = indexes
+    let iter = indexes
         .into_iter()
-        .filter(|i| search_rules.is_index_authorized(&i.uid))
-        .skip(paginate.offset)
-        .take(paginate.limit)
-        .collect();
+        .filter(|i| search_rules.is_index_authorized(&i.uid));
+    let ret = paginate
+        .into_inner()
+        .auto_paginate_unsized(nb_indexes, iter);
 
-    debug!("returns: {:?}", indexes);
-    Ok(HttpResponse::Ok().json(json!({
-        "results": indexes,
-        "offset": paginate.offset,
-        "limit": paginate.limit,
-        "total": nb_indexes,
-    })))
+    debug!("returns: {:?}", ret);
+    Ok(HttpResponse::Ok().json(ret))
 }
 
 #[derive(Debug, Deserialize)]
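On the query side, web::Query<Pagination> is driven by serde, so the defaults can be exercised in isolation. A minimal sketch, using the serde_urlencoded crate that actix-web relies on for query strings:

use serde::Deserialize;

// Copy of the shared type and its default, as added in src/routes/mod.rs.
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct Pagination {
    #[serde(default)]
    offset: usize,
    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
    limit: usize,
}

fn main() {
    // GET /indexes?offset=40 → offset parsed, limit falls back to 20.
    let p: Pagination = serde_urlencoded::from_str("offset=40").unwrap();
    assert_eq!((p.offset, p.limit), (40, 20));

    // deny_unknown_fields turns a misspelled parameter into a parse error,
    // which actix-web surfaces as a 400 on the route.
    assert!(serde_urlencoded::from_str::<Pagination>("limits=10").is_err());
}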

meilisearch-http/src/routes/mod.rs

@@ -3,6 +3,7 @@ use std::str::FromStr;
 use actix_web::{web, HttpResponse};
 use log::debug;
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 
 use meilisearch_error::ResponseError;
@@ -58,6 +59,86 @@ pub fn fold_star_or<T>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<Vec<T>>
         .collect()
 }
 
+const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
+
+#[derive(Debug, Clone, Copy, Deserialize)]
+#[serde(rename_all = "camelCase", deny_unknown_fields)]
+pub struct Pagination {
+    #[serde(default)]
+    pub offset: usize,
+    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
+    pub limit: usize,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct PaginationView<T> {
+    pub results: Vec<T>,
+    pub offset: usize,
+    pub limit: usize,
+    pub total: usize,
+}
+
+impl Pagination {
+    /// Given the full data to paginate, returns the selected section.
+    pub fn auto_paginate_sized<T>(
+        self,
+        content: impl IntoIterator<Item = T> + ExactSizeIterator,
+    ) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        let total = content.len();
+        let content: Vec<_> = content
+            .into_iter()
+            .skip(self.offset)
+            .take(self.limit)
+            .collect();
+        self.format_with(total, content)
+    }
+
+    /// Given an iterator and the total number of elements, returns the selected section.
+    pub fn auto_paginate_unsized<T>(
+        self,
+        total: usize,
+        content: impl IntoIterator<Item = T>,
+    ) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        let content: Vec<_> = content
+            .into_iter()
+            .skip(self.offset)
+            .take(self.limit)
+            .collect();
+        self.format_with(total, content)
+    }
+
+    /// Given the data already paginated + the total number of elements, it stores
+    /// everything in a [PaginationView].
+    pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
+    where
+        T: Serialize,
+    {
+        PaginationView {
+            results,
+            offset: self.offset,
+            limit: self.limit,
+            total,
+        }
+    }
+}
+
+impl<T> PaginationView<T> {
+    pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
+        Self {
+            offset,
+            limit,
+            results,
+            total,
+        }
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[allow(clippy::large_enum_variant)]
 #[serde(tag = "name")]
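To see how the two helpers divide the work, here is a self-contained sketch with trimmed copies of the types above (derives and visibility cut down); the data is invented. auto_paginate_sized reads total from the ExactSizeIterator bound, while the unsized variant, which list_indexes uses after its authorization filter, takes total from the caller:

use serde::Serialize;

// Trimmed copies of the types introduced above, for illustration.
#[derive(Clone, Copy)]
struct Pagination {
    offset: usize,
    limit: usize,
}

#[derive(Serialize)]
struct PaginationView<T> {
    results: Vec<T>,
    offset: usize,
    limit: usize,
    total: usize,
}

impl Pagination {
    // Sized path: the iterator knows its length, so `total` is read off it.
    fn auto_paginate_sized<T: Serialize>(
        self,
        content: impl IntoIterator<Item = T> + ExactSizeIterator,
    ) -> PaginationView<T> {
        let total = content.len();
        let results = content.into_iter().skip(self.offset).take(self.limit).collect();
        self.format_with(total, results)
    }

    // Unsized path: adaptors like `filter` lose ExactSizeIterator, so the
    // caller supplies `total` (list_indexes passes nb_indexes).
    fn auto_paginate_unsized<T: Serialize>(
        self,
        total: usize,
        content: impl IntoIterator<Item = T>,
    ) -> PaginationView<T> {
        let results = content.into_iter().skip(self.offset).take(self.limit).collect();
        self.format_with(total, results)
    }

    fn format_with<T: Serialize>(self, total: usize, results: Vec<T>) -> PaginationView<T> {
        PaginationView { results, offset: self.offset, limit: self.limit, total }
    }
}

fn main() {
    let page = Pagination { offset: 1, limit: 2 };

    // Vec's iterator is ExactSizeIterator: skip 1, take 2 out of 4.
    let sized = page.auto_paginate_sized(vec!["a", "b", "c", "d"].into_iter());
    assert_eq!(
        serde_json::to_string(&sized).unwrap(),
        r#"{"results":["b","c"],"offset":1,"limit":2,"total":4}"#
    );

    // After a filter the length is gone; total is passed in separately.
    let names = vec!["a", "b", "c", "d"];
    let total = names.len();
    let view = page.auto_paginate_unsized(total, names.into_iter().filter(|n| *n != "c"));
    assert_eq!(view.results, vec!["b", "d"]);
}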

meilisearch-http/tests/documents/add_documents.rs

@@ -827,7 +827,7 @@ async fn add_larger_dataset() {
             ..Default::default()
         })
        .await;
-    assert_eq!(code, 200);
+    assert_eq!(code, 200, "failed with `{}`", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 77);
 }