update codebase with schema refactor

This commit is contained in:
mpostma 2020-12-15 12:04:51 +01:00
parent 54686b0505
commit 2904ca7f57
9 changed files with 27 additions and 28 deletions

View File

@ -350,7 +350,7 @@ impl Database {
index.main.put_name(&mut writer, name)?; index.main.put_name(&mut writer, name)?;
index.main.put_created_at(&mut writer)?; index.main.put_created_at(&mut writer)?;
index.main.put_updated_at(&mut writer)?; index.main.put_updated_at(&mut writer)?;
index.main.put_schema(&mut writer, &Schema::new())?; index.main.put_schema(&mut writer, &Schema::default())?;
let env_clone = self.env.clone(); let env_clone = self.env.clone();
let update_env_clone = self.update_env.clone(); let update_env_clone = self.update_env.clone();

View File

@ -245,8 +245,8 @@ mod test {
#[test] #[test]
fn test_facet_key() { fn test_facet_key() {
let mut schema = Schema::new(); let mut schema = Schema::default();
let id = schema.insert_and_index("hello").unwrap(); let id = schema.insert_with_position("hello").unwrap().0;
let facet_list = [schema.id("hello").unwrap()]; let facet_list = [schema.id("hello").unwrap()];
assert_eq!( assert_eq!(
FacetKey::from_str("hello:12", &schema, &facet_list).unwrap(), FacetKey::from_str("hello:12", &schema, &facet_list).unwrap(),
@ -286,8 +286,8 @@ mod test {
#[test] #[test]
fn test_parse_facet_array() { fn test_parse_facet_array() {
use either::Either::{Left, Right}; use either::Either::{Left, Right};
let mut schema = Schema::new(); let mut schema = Schema::default();
let _id = schema.insert_and_index("hello").unwrap(); let _id = schema.insert_with_position("hello").unwrap();
let facet_list = [schema.id("hello").unwrap()]; let facet_list = [schema.id("hello").unwrap()];
assert_eq!( assert_eq!(
FacetFilter::from_str("[[\"hello:12\"]]", &schema, &facet_list).unwrap(), FacetFilter::from_str("[[\"hello:12\"]]", &schema, &facet_list).unwrap(),

View File

@ -415,8 +415,7 @@ mod tests {
let mut final_indexes = Vec::new(); let mut final_indexes = Vec::new();
for index in indexes { for index in indexes {
let name = index.attribute.to_string(); let name = index.attribute.to_string();
schema.insert(&name).unwrap(); let indexed_pos = schema.insert_with_position(&name).unwrap().1;
let indexed_pos = schema.set_indexed(&name).unwrap().1;
let index = DocIndex { let index = DocIndex {
attribute: indexed_pos.0, attribute: indexed_pos.0,
..*index ..*index
@ -447,7 +446,7 @@ mod tests {
.postings_lists .postings_lists
.put_postings_list(&mut writer, &word, &postings_list) .put_postings_list(&mut writer, &word, &postings_list)
.unwrap(); .unwrap();
} }
for ((docid, attr, _), count) in fields_counts { for ((docid, attr, _), count) in fields_counts {
let prev = index let prev = index
@ -461,7 +460,7 @@ mod tests {
.documents_fields_counts .documents_fields_counts
.put_document_field_count(&mut writer, docid, IndexedPos(attr), prev + count) .put_document_field_count(&mut writer, docid, IndexedPos(attr), prev + count)
.unwrap(); .unwrap();
} }
writer.commit().unwrap(); writer.commit().unwrap();

View File

@ -13,7 +13,7 @@ static RANKING_RULE_REGEX: Lazy<regex::Regex> = Lazy::new(|| {
regex::Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap() regex::Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap()
}); });
#[derive(Default, Clone, Serialize, Deserialize)] #[derive(Default, Clone, Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Settings { pub struct Settings {
#[serde(default, deserialize_with = "deserialize_some")] #[serde(default, deserialize_with = "deserialize_some")]

View File

@ -126,13 +126,13 @@ where A: AsRef<[u8]>,
let serialized = serde_json::to_vec(value)?; let serialized = serde_json::to_vec(value)?;
documents_fields.put_document_field(writer, document_id, field_id, &serialized)?; documents_fields.put_document_field(writer, document_id, field_id, &serialized)?;
if let Some(indexed_pos) = schema.is_indexed(field_id) { if let Some(indexed_pos) = schema.is_searchable(field_id) {
let number_of_words = index_value(indexer, document_id, *indexed_pos, value); let number_of_words = index_value(indexer, document_id, indexed_pos, value);
if let Some(number_of_words) = number_of_words { if let Some(number_of_words) = number_of_words {
documents_fields_counts.put_document_field_count( documents_fields_counts.put_document_field_count(
writer, writer,
document_id, document_id,
*indexed_pos, indexed_pos,
number_of_words as u16, number_of_words as u16,
)?; )?;
} }
@ -228,7 +228,7 @@ pub fn apply_addition<'a, 'b, 'c>(
for (document_id, document) in &documents_additions { for (document_id, document) in &documents_additions {
// For each key-value pair in the document. // For each key-value pair in the document.
for (attribute, value) in document { for (attribute, value) in document {
let field_id = schema.insert_and_index(&attribute)?; let (field_id, _) = schema.insert_with_position(&attribute)?;
index_document( index_document(
writer, writer,
index.documents_fields, index.documents_fields,

View File

@ -71,14 +71,14 @@ pub fn apply_settings_update(
match settings.searchable_attributes.clone() { match settings.searchable_attributes.clone() {
UpdateState::Update(v) => { UpdateState::Update(v) => {
if v.iter().any(|e| e == "*") || v.is_empty() { if v.iter().any(|e| e == "*") || v.is_empty() {
schema.set_all_fields_as_indexed(); schema.set_all_searchable();
} else { } else {
schema.update_indexed(v)?; schema.update_searchable(v)?;
} }
must_reindex = true; must_reindex = true;
}, },
UpdateState::Clear => { UpdateState::Clear => {
schema.set_all_fields_as_indexed(); schema.set_all_searchable();
must_reindex = true; must_reindex = true;
}, },
UpdateState::Nothing => (), UpdateState::Nothing => (),
@ -86,13 +86,13 @@ pub fn apply_settings_update(
match settings.displayed_attributes.clone() { match settings.displayed_attributes.clone() {
UpdateState::Update(v) => { UpdateState::Update(v) => {
if v.contains("*") || v.is_empty() { if v.contains("*") || v.is_empty() {
schema.set_all_fields_as_displayed(); schema.set_all_displayed();
} else { } else {
schema.update_displayed(v)? schema.update_displayed(v)?
} }
}, },
UpdateState::Clear => { UpdateState::Clear => {
schema.set_all_fields_as_displayed(); schema.set_all_displayed();
}, },
UpdateState::Nothing => (), UpdateState::Nothing => (),
} }

View File

@ -178,7 +178,7 @@ impl<'a> SearchBuilder<'a> {
all_attributes.extend(&all_formatted); all_attributes.extend(&all_formatted);
}, },
None => { None => {
all_attributes.extend(schema.displayed_name()); all_attributes.extend(schema.displayed_names());
// If we specified at least one attribute to highlight or crop then // If we specified at least one attribute to highlight or crop then
// all available attributes will be returned in the _formatted field. // all available attributes will be returned in the _formatted field.
if self.attributes_to_highlight.is_some() || self.attributes_to_crop.is_some() { if self.attributes_to_highlight.is_some() || self.attributes_to_crop.is_some() {
@ -445,7 +445,7 @@ fn calculate_matches(
continue; continue;
} }
} }
if !schema.displayed_name().contains(attribute) { if !schema.displayed_names().contains(&attribute) {
continue; continue;
} }
if let Some(pos) = matches_result.get_mut(attribute) { if let Some(pos) = matches_result.get_mut(attribute) {

View File

@ -1,4 +1,4 @@
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet, BTreeSet};
use actix_web::{get, post, web, HttpResponse}; use actix_web::{get, post, web, HttpResponse};
use log::warn; use log::warn;
@ -120,8 +120,8 @@ impl SearchQuery {
search_builder.limit(limit); search_builder.limit(limit);
} }
let available_attributes = schema.displayed_name(); let available_attributes = schema.displayed_names();
let mut restricted_attributes: HashSet<&str>; let mut restricted_attributes: BTreeSet<&str>;
match &self.attributes_to_retrieve { match &self.attributes_to_retrieve {
Some(attributes_to_retrieve) => { Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = let attributes_to_retrieve: HashSet<&str> =
@ -129,7 +129,7 @@ impl SearchQuery {
if attributes_to_retrieve.contains("*") { if attributes_to_retrieve.contains("*") {
restricted_attributes = available_attributes.clone(); restricted_attributes = available_attributes.clone();
} else { } else {
restricted_attributes = HashSet::new(); restricted_attributes = BTreeSet::new();
search_builder.attributes_to_retrieve(HashSet::new()); search_builder.attributes_to_retrieve(HashSet::new());
for attr in attributes_to_retrieve { for attr in attributes_to_retrieve {
if available_attributes.contains(attr) { if available_attributes.contains(attr) {

View File

@ -523,11 +523,11 @@ async fn delete_attributes_for_faceting(
} }
fn get_indexed_attributes(schema: &Schema) -> Vec<String> { fn get_indexed_attributes(schema: &Schema) -> Vec<String> {
if schema.is_indexed_all() { if schema.is_searchable_all() {
["*"].iter().map(|s| s.to_string()).collect() ["*"].iter().map(|s| s.to_string()).collect()
} else { } else {
schema schema
.indexed_name() .searchable_names()
.iter() .iter()
.map(|s| s.to_string()) .map(|s| s.to_string())
.collect() .collect()
@ -539,7 +539,7 @@ fn get_displayed_attributes(schema: &Schema) -> BTreeSet<String> {
["*"].iter().map(|s| s.to_string()).collect() ["*"].iter().map(|s| s.to_string()).collect()
} else { } else {
schema schema
.displayed_name() .displayed_names()
.iter() .iter()
.map(|s| s.to_string()) .map(|s| s.to_string())
.collect() .collect()