Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-12-04 18:45:46 +01:00
Merge branch 'add-index-name-to-sort-error-message' of github.com:cisco877/meilisearch into add-index-name-to-sort-error-message
Commit d40a982434
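
The net effect of this merge is that sort errors now say which index rejected the sort, with the index name stored on `Index` itself instead of being threaded through every search call. The snippet below is only an illustrative sketch of the new message format; the struct and `message` helper are hypothetical stand-ins, not milli's real `UserError::InvalidSortableAttribute` API, and only the expected strings come from the diff's tests.

use std::collections::BTreeSet;

// Hypothetical stand-in for the error this branch extends; the real
// `UserError::InvalidSortableAttribute` lives in milli and carries more fields.
struct InvalidSortableAttribute {
    field: String,
    index: String, // the index name that this merge threads into the error
    valid_fields: BTreeSet<String>,
}

impl InvalidSortableAttribute {
    fn message(&self) -> String {
        // The prefix now names the index, matching the updated test expectations.
        let prefix =
            format!("Attribute `{}` of index `{}` is not sortable.", self.field, self.index);
        if self.valid_fields.is_empty() {
            format!("{prefix} This index does not have configured sortable attributes.")
        } else {
            let list =
                self.valid_fields.iter().map(|f| format!("`{f}`")).collect::<Vec<_>>().join(", ");
            format!("{prefix} Available sortable attributes are: {list}.")
        }
    }
}

fn main() {
    let err = InvalidSortableAttribute {
        field: "title".to_string(),
        index: "test".to_string(),
        valid_fields: BTreeSet::from(["id".to_string()]),
    };
    assert_eq!(
        err.message(),
        "Attribute `title` of index `test` is not sortable. Available sortable attributes are: `id`."
    );
}
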
@@ -56,7 +56,7 @@ fn main() {
         Some(path) => TempDir::new_in(path).unwrap(),
         None => TempDir::new().unwrap(),
     };
-    let index = Index::new(options, tempdir.path()).unwrap();
+    let index = Index::new("", options, tempdir.path()).unwrap();
     let indexer_config = IndexerConfig::default();
     let index_documents_config = IndexDocumentsConfig::default();

@@ -103,7 +103,7 @@ impl ReopenableIndex {
                 return Ok(());
             }
             map.unavailable.remove(&self.uuid);
-            map.create(&self.uuid, path, None, self.enable_mdb_writemap, self.map_size)?;
+            map.create("", &self.uuid, path, None, self.enable_mdb_writemap, self.map_size)?;
         }
         Ok(())
     }

@@ -167,6 +167,7 @@ impl IndexMap {
     ///
     pub fn create(
         &mut self,
+        name: &str,
         uuid: &Uuid,
         path: &Path,
         date: Option<(OffsetDateTime, OffsetDateTime)>,

@@ -176,7 +177,7 @@ impl IndexMap {
         if !matches!(self.get_unavailable(uuid), Missing) {
             panic!("Attempt to open an index that was unavailable");
         }
-        let index = create_or_open_index(path, date, enable_mdb_writemap, map_size)?;
+        let index = create_or_open_index(name, path, date, enable_mdb_writemap, map_size)?;
         match self.available.insert(*uuid, index.clone()) {
             InsertionOutcome::InsertedNew => (),
             InsertionOutcome::Evicted(evicted_uuid, evicted_index) => {

@@ -296,6 +297,7 @@ impl IndexMap {
 /// Create or open an index in the specified path.
 /// The path *must* exist or an error will be thrown.
 fn create_or_open_index(
+    name: &str,
     path: &Path,
     date: Option<(OffsetDateTime, OffsetDateTime)>,
     enable_mdb_writemap: bool,

@@ -309,9 +311,9 @@ fn create_or_open_index(
     }

     if let Some((created, updated)) = date {
-        Ok(Index::new_with_creation_dates(options, path, created, updated)?)
+        Ok(Index::new_with_creation_dates(name, options, path, created, updated)?)
     } else {
-        Ok(Index::new(options, path)?)
+        Ok(Index::new(name, options, path)?)
     }
 }

@@ -182,6 +182,7 @@ impl IndexMapper {
                 // This is very unlikely to happen in practice.
                 // TODO: it would be better to lazily create the index. But we need an Index::open function for milli.
                 let index = self.index_map.write().unwrap().create(
+                    &name,
                     &uuid,
                     &index_path,
                     date,

@@ -371,6 +372,7 @@ impl IndexMapper {
                 let index_path = self.base_path.join(uuid.to_string());

                 break index_map.create(
+                    name,
                     &uuid,
                     &index_path,
                     None,

@@ -53,8 +53,8 @@ pub async fn search(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index_uid_string = index_uid.into_inner();
-    let index_uid = IndexUid::try_from(index_uid_string.clone())?;
+    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
+
     let query = params.into_inner();
     debug!(parameters = ?query, "Facet search");

@@ -72,14 +72,7 @@ pub async fn search(
     let index = index_scheduler.index(&index_uid)?;
     let features = index_scheduler.features();
     let search_result = tokio::task::spawn_blocking(move || {
-        perform_facet_search(
-            &index,
-            &index_uid_string,
-            search_query,
-            facet_query,
-            facet_name,
-            features,
-        )
+        perform_facet_search(&index, search_query, facet_query, facet_name, features)
     })
     .await?;

@@ -203,10 +203,9 @@ pub async fn search_with_url_query(

     let distribution = embed(&mut query, index_scheduler.get_ref(), &index).await?;

-    let search_result = tokio::task::spawn_blocking(move || {
-        perform_search(&index, &index_uid.to_string(), query, features, distribution)
-    })
-    .await?;
+    let search_result =
+        tokio::task::spawn_blocking(move || perform_search(&index, query, features, distribution))
+            .await?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }

@@ -243,10 +242,9 @@ pub async fn search_with_post(

     let distribution = embed(&mut query, index_scheduler.get_ref(), &index).await?;

-    let search_result = tokio::task::spawn_blocking(move || {
-        perform_search(&index, &index_uid.to_string(), query, features, distribution)
-    })
-    .await?;
+    let search_result =
+        tokio::task::spawn_blocking(move || perform_search(&index, query, features, distribution))
+            .await?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }

@@ -54,8 +54,6 @@ pub async fn multi_search_with_post(
     {
         debug!(on_index = query_index, parameters = ?query, "Multi-search");

-        let index_uid_name = index_uid.to_string();
-
         // Check index from API key
         if !index_scheduler.filters().is_index_authorized(&index_uid) {
             return Err(AuthenticationError::InvalidToken).with_index(query_index);

@@ -82,13 +80,13 @@ pub async fn multi_search_with_post(
             .with_index(query_index)?;

         let search_result = tokio::task::spawn_blocking(move || {
-            perform_search(&index, &index_uid_name.clone(), query, features, distribution)
+            perform_search(&index, query, features, distribution)
         })
         .await
         .with_index(query_index)?;

         search_results.push(SearchResultWithIndex {
-            index_uid: index_uid.to_string(),
+            index_uid: index_uid.into_inner(),
             result: search_result.with_index(query_index)?,
         });
     }

@@ -217,7 +217,6 @@ impl SearchQueryWithIndex {
             attributes_to_search_on,
             hybrid,
         } = self;
-
         (
             index_uid,
             SearchQuery {

@@ -379,13 +378,12 @@ pub fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {

 fn prepare_search<'t>(
     index: &'t Index,
-    index_uid: &'t String,
     rtxn: &'t RoTxn,
     query: &'t SearchQuery,
     features: RoFeatures,
     distribution: Option<DistributionShift>,
 ) -> Result<(milli::Search<'t>, bool, usize, usize), MeilisearchHttpError> {
-    let mut search = index.search(rtxn, &index_uid);
+    let mut search = index.search(rtxn);

     if query.vector.is_some() {
         features.check_vector("Passing `vector` as a query parameter")?;

@@ -488,7 +486,6 @@ fn prepare_search<'t>(

 pub fn perform_search(
     index: &Index,
-    index_uid: &String,
     query: SearchQuery,
     features: RoFeatures,
     distribution: Option<DistributionShift>,

@@ -497,7 +494,7 @@ pub fn perform_search(
     let rtxn = index.read_txn()?;

     let (search, is_finite_pagination, max_total_hits, offset) =
-        prepare_search(index, &index_uid, &rtxn, &query, features, distribution)?;
+        prepare_search(index, &rtxn, &query, features, distribution)?;

     let milli::SearchResult { documents_ids, matching_words, candidates, document_scores, .. } =
         match &query.hybrid {

@@ -720,7 +717,6 @@ pub fn perform_search(

 pub fn perform_facet_search(
     index: &Index,
-    index_uid: &String,
     search_query: SearchQuery,
     facet_query: Option<String>,
     facet_name: String,

@@ -728,8 +724,8 @@ pub fn perform_facet_search(
 ) -> Result<FacetSearchResult, MeilisearchHttpError> {
     let before_search = Instant::now();
     let rtxn = index.read_txn()?;
-    let (search, _, _, _) =
-        prepare_search(index, &index_uid, &rtxn, &search_query, features, None)?;
+
+    let (search, _, _, _) = prepare_search(index, &rtxn, &search_query, features, None)?;
     let mut facet_search =
         SearchForFacetValues::new(facet_name, search, search_query.hybrid.is_some());
     if let Some(facet_query) = &facet_query {

@@ -882,7 +882,7 @@ async fn sort_unsortable_attribute() {
     index.wait_task(1).await;

     let expected_response = json!({
-        "message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.",
+        "message": "Attribute `title` of index `test` is not sortable. Available sortable attributes are: `id`.",
         "code": "invalid_search_sort",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_search_sort"

@@ -267,7 +267,7 @@ fn export_a_dump(
     for result in index_mapping.iter(&rtxn)? {
         let (uid, uuid) = result?;
         let index_path = db_path.join("indexes").join(uuid.to_string());
-        let index = Index::new(EnvOpenOptions::new(), &index_path).with_context(|| {
+        let index = Index::new("", EnvOpenOptions::new(), &index_path).with_context(|| {
             format!("While trying to open the index at path {:?}", index_path.display())
         })?;

@@ -41,7 +41,7 @@ fn main() -> Result<(), Box<dyn Error>> {
     options.map_size(100 * 1024 * 1024 * 1024); // 100 GB

     std::fs::create_dir_all(&index_path).unwrap();
-    let index = Index::new(options, index_path).unwrap();
+    let index = Index::new(program_name.as_str(), options, index_path).unwrap();
     let mut wtxn = index.write_txn().unwrap();

     let config = IndexerConfig::default();

@@ -28,7 +28,7 @@ fn main() -> Result<(), Box<dyn Error>> {
     let mut options = EnvOpenOptions::new();
     options.map_size(100 * 1024 * 1024 * 1024); // 100 GB

-    let index = Index::new(options, dataset)?;
+    let index = Index::new(program_name.as_str(), options, dataset)?;
     let txn = index.read_txn()?;
     let mut query = String::new();
     while stdin().read_line(&mut query)? > 0 {

@@ -10,7 +10,7 @@ fn main() {
     let mut options = EnvOpenOptions::new();
     options.map_size(100 * 1024 * 1024 * 1024); // 100 GB

-    let index = Index::new(options, "data_movies.ms").unwrap();
+    let index = Index::new("", options, "data_movies.ms").unwrap();
     let mut wtxn = index.write_txn().unwrap();

     let config = IndexerConfig::default();

@@ -452,7 +452,7 @@ impl std::fmt::Display for FaultSource {

 #[test]
 fn conditionally_lookup_for_error_message() {
-    let prefix = "Attribute `name` is not sortable.";
+    let prefix = "Attribute `name` of index `index` is not sortable.";
     let messages = vec![
         (BTreeSet::new(), "This index does not have configured sortable attributes."),
         (BTreeSet::from(["age".to_string()]), "Available sortable attributes are: `age`."),

@@ -461,6 +461,7 @@ fn conditionally_lookup_for_error_message() {
     for (list, suffix) in messages {
         let err = UserError::InvalidSortableAttribute {
             field: "name".to_string(),
+            index: "index".to_string(),
             valid_fields: list,
             hidden_fields: false,
         };

@@ -100,6 +100,8 @@ pub mod db_name {

 #[derive(Clone)]
 pub struct Index {
+    pub name: String,
+
     /// The LMDB environment which this index is associated with.
     pub(crate) env: heed::Env,

@@ -171,6 +173,7 @@ pub struct Index {

 impl Index {
     pub fn new_with_creation_dates<P: AsRef<Path>>(
+        name: &str,
         mut options: heed::EnvOpenOptions,
         path: P,
         created_at: OffsetDateTime,

@@ -180,7 +183,8 @@ impl Index {

         options.max_dbs(25);

-        let env = options.open(path)?;
+        let name = String::from(name);
+        let env = options.open(path)?;
         let mut wtxn = env.write_txn()?;
         let main = env.database_options().name(MAIN).create(&mut wtxn)?;
         let word_docids = env.create_database(&mut wtxn, Some(WORD_DOCIDS))?;

@@ -229,6 +233,7 @@ impl Index {
         Index::set_creation_dates(&env, main, created_at, updated_at)?;

         Ok(Index {
+            name,
             env,
             main,
             external_documents_ids,

@@ -258,9 +263,9 @@ impl Index {
         })
     }

-    pub fn new<P: AsRef<Path>>(options: heed::EnvOpenOptions, path: P) -> Result<Index> {
+    pub fn new<P: AsRef<Path>>(name: &str, options: heed::EnvOpenOptions, path: P) -> Result<Index> {
         let now = OffsetDateTime::now_utc();
-        Self::new_with_creation_dates(options, path, now, now)
+        Self::new_with_creation_dates(name, options, path, now, now)
     }

     fn set_creation_dates(

@@ -1212,8 +1217,8 @@ impl Index {
         FacetDistribution::new(rtxn, self)
     }

-    pub fn search<'a>(&'a self, rtxn: &'a RoTxn, index_uid: &'a String) -> Search<'a> {
-        Search::new(rtxn, self, &index_uid)
+    pub fn search<'a>(&'a self, rtxn: &'a RoTxn) -> Search<'a> {
+        Search::new(rtxn, self)
     }

     /// Returns the index creation time.

@@ -1548,7 +1553,7 @@ pub(crate) mod tests {
         let mut options = EnvOpenOptions::new();
         options.map_size(size);
         let _tempdir = TempDir::new_in(".").unwrap();
-        let inner = Index::new(options, _tempdir.path()).unwrap();
+        let inner = Index::new("temp", options, _tempdir.path()).unwrap();
         let indexer_config = IndexerConfig::default();
         let index_documents_config = IndexDocumentsConfig::default();
         Self { inner, indexer_config, index_documents_config, _tempdir }

@@ -115,7 +115,6 @@ impl<'a> Search<'a> {
         // TODO: find classier way to achieve that than to reset vector and query params
         // create separate keyword and semantic searches
         let mut search = Search {
-            index_uid: &self.index_uid,
             query: self.query.clone(),
             vector: self.vector.clone(),
             filter: self.filter.clone(),

@@ -36,7 +36,6 @@ pub mod hybrid;
 pub mod new;

 pub struct Search<'a> {
-    index_uid: &'a String,
     query: Option<String>,
     vector: Option<Vec<f32>>,
     // this should be linked to the String in the query

@@ -58,9 +57,8 @@ pub struct Search<'a> {
 }

 impl<'a> Search<'a> {
-    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index, index_uid: &'a String) -> Search<'a> {
+    pub fn new(rtxn: &'a heed::RoTxn, index: &'a Index) -> Search<'a> {
         Search {
-            index_uid,
             query: None,
             vector: None,
             filter: None,

|
||||
|
||||
pub fn execute_for_candidates(&self, has_vector_search: bool) -> Result<RoaringBitmap> {
|
||||
if has_vector_search {
|
||||
let ctx = SearchContext::new(self.index, self.index_uid.clone(), self.rtxn);
|
||||
let ctx = SearchContext::new(self.index, self.rtxn);
|
||||
filtered_universe(&ctx, &self.filter)
|
||||
} else {
|
||||
Ok(self.execute()?.candidates)
|
||||
@@ -175,7 +173,7 @@ impl<'a> Search<'a> {
             }
         };

-        let mut ctx = SearchContext::new(self.index, self.index_uid.to_string(), self.rtxn);
+        let mut ctx = SearchContext::new(self.index, self.rtxn);

         if let Some(searchable_attributes) = self.searchable_attributes {
             ctx.searchable_attributes(searchable_attributes)?;

@@ -226,7 +224,6 @@ impl<'a> Search<'a> {
 impl fmt::Debug for Search<'_> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let Search {
-            index_uid,
             query,
             vector: _,
             filter,

@@ -245,7 +242,6 @@ impl fmt::Debug for Search<'_> {
             embedder_name,
         } = self;
         f.debug_struct("Search")
-            .field("index_uid", index_uid)
             .field("query", query)
             .field("vector", &"[...]")
             .field("filter", filter)

@@ -58,7 +58,6 @@ use crate::{
 /// A structure used throughout the execution of a search query.
 pub struct SearchContext<'ctx> {
     pub index: &'ctx Index,
-    pub index_uid: String,
     pub txn: &'ctx RoTxn<'ctx>,
     pub db_cache: DatabaseCache<'ctx>,
     pub word_interner: DedupInterner<String>,

@@ -69,10 +68,9 @@ pub struct SearchContext<'ctx> {
 }

 impl<'ctx> SearchContext<'ctx> {
-    pub fn new(index: &'ctx Index, index_uid: String, txn: &'ctx RoTxn<'ctx>) -> Self {
+    pub fn new(index: &'ctx Index, txn: &'ctx RoTxn<'ctx>) -> Self {
         Self {
             index,
-            index_uid,
             txn,
             db_cache: <_>::default(),
             word_interner: <_>::default(),

@@ -708,7 +706,7 @@ fn check_sort_criteria(ctx: &SearchContext, sort_criteria: Option<&Vec<AscDesc>>

                 return Err(UserError::InvalidSortableAttribute {
                     field: field.to_string(),
-                    index: ctx.index_uid.clone(),
+                    index: ctx.index.name.clone(),
                     valid_fields,
                     hidden_fields,
                 }

@@ -720,7 +718,7 @@ fn check_sort_criteria(ctx: &SearchContext, sort_criteria: Option<&Vec<AscDesc>>

                 return Err(UserError::InvalidSortableAttribute {
                     field: "_geo".to_string(),
-                    index: ctx.index_uid.clone(),
+                    index: ctx.index.name.clone(),
                     valid_fields,
                     hidden_fields,
                 }

@@ -13,7 +13,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
     let path = tempfile::tempdir().unwrap();
     let mut options = EnvOpenOptions::new();
     options.map_size(10 * 1024 * 1024); // 10 MB
-    let index = Index::new(options, &path).unwrap();
+    let index = Index::new("", options, &path).unwrap();

     let mut wtxn = index.write_txn().unwrap();
     let config = IndexerConfig::default();

@@ -25,6 +25,7 @@ impl<'t, 'i> ClearDocuments<'t, 'i> {

         self.index.set_updated_at(self.wtxn, &OffsetDateTime::now_utc())?;
         let Index {
+            name: _name,
             env: _env,
             main: _main,
             external_documents_ids,

@@ -13,7 +13,7 @@ fn test_facet_distribution_with_no_facet_values() {
     let path = tempfile::tempdir().unwrap();
     let mut options = EnvOpenOptions::new();
     options.map_size(10 * 1024 * 1024); // 10 MB
-    let index = Index::new(options, &path).unwrap();
+    let index = Index::new("", options, &path).unwrap();

     let mut wtxn = index.write_txn().unwrap();
     let config = IndexerConfig::default();

@@ -31,7 +31,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
     let path = tempfile::tempdir().unwrap();
     let mut options = EnvOpenOptions::new();
     options.map_size(10 * 1024 * 1024); // 10 MB
-    let index = Index::new(options, &path).unwrap();
+    let index = Index::new("", options, &path).unwrap();

     let mut wtxn = index.write_txn().unwrap();
     let config = IndexerConfig::default();

@@ -262,7 +262,7 @@ fn criteria_ascdesc() {
     let path = tempfile::tempdir().unwrap();
     let mut options = EnvOpenOptions::new();
     options.map_size(12 * 1024 * 1024); // 10 MB
-    let index = Index::new(options, &path).unwrap();
+    let index = Index::new("", options, &path).unwrap();

     let mut wtxn = index.write_txn().unwrap();
     let config = IndexerConfig::default();

@@ -104,7 +104,7 @@ fn test_typo_disabled_on_word() {
     let tmp = tempdir().unwrap();
     let mut options = EnvOpenOptions::new();
     options.map_size(4096 * 100);
-    let index = Index::new(options, tmp.path()).unwrap();
+    let index = Index::new("", options, tmp.path()).unwrap();

     let mut builder = milli::documents::DocumentsBatchBuilder::new(Vec::new());
     let doc1 = json!({