1176: fix race condition in document addition r=Kerollmops a=MarinPostma

As described in #1160, there was a race condition when settings were updated and documents were added simultaneously, because the schema update and the document addition were processed in two different transactions. This PR moves the primary-key schema update into the same transaction as the document addition, while keeping the primary-key validity checks in the HTTP route, so that error reporting for the document addition route remains unchanged.
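For illustration only (toy types, not MeiliSearch's actual API), here is a minimal standalone sketch of the check-then-act race this PR removes: the schema is read in one transaction and written in another, so a concurrent settings update can interleave between the two.

```rust
use std::sync::{Arc, Mutex};
use std::thread;

#[derive(Debug, Default)]
struct Schema {
    primary_key: Option<String>,
}

fn main() {
    let schema = Arc::new(Mutex::new(Schema::default()));

    // Document addition (old behavior): read the schema in one "transaction",
    // then write the guessed primary key in another.
    let s1 = Arc::clone(&schema);
    let t1 = thread::spawn(move || {
        let needs_key = s1.lock().unwrap().primary_key.is_none(); // txn 1: read
        if needs_key {
            // A settings update can slip in right here, between the two locks.
            s1.lock().unwrap().primary_key = Some("id".into()); // txn 2: write
        }
    });

    // Concurrent settings update: sets a different primary key.
    let s2 = Arc::clone(&schema);
    let t2 = thread::spawn(move || {
        s2.lock().unwrap().primary_key = Some("uid".into());
    });

    t1.join().unwrap();
    t2.join().unwrap();
    // Depending on the interleaving, "id" may silently overwrite "uid".
    println!("{:?}", schema.lock().unwrap().primary_key);
}
```

Doing the read, the decision, and the write under a single lock (or, in MeiliSearch's case, a single write transaction) removes the window in which another writer can interleave.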

Closes #1160.

Co-authored-by: mpostma <postma.marin@protonmail.com>
Co-authored-by: marin <postma.marin@protonmail.com>
bors[bot] 2021-02-02 09:26:32 +00:00 committed by GitHub
commit c984fa1071
5 changed files with 98 additions and 63 deletions

```diff
@@ -128,15 +128,15 @@ fn import_index_v1(
         // push document in buffer
         values.push(document?);
         // if buffer is full, create and apply a batch, and clean buffer
-        if values.len() == document_batch_size {
+        if values.len() == document_batch_size {
             let batch = std::mem::replace(&mut values, Vec::with_capacity(document_batch_size));
-            apply_documents_addition(write_txn, &index, batch)?;
+            apply_documents_addition(write_txn, &index, batch, None)?;
         }
     }

-    // apply documents remaining in the buffer
-    if !values.is_empty() {
-        apply_documents_addition(write_txn, &index, values)?;
+    // apply documents remaining in the buffer
+    if !values.is_empty() {
+        apply_documents_addition(write_txn, &index, values, None)?;
     }

     // sync index information: stats, updated_at, last_update
```
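The `std::mem::replace` pattern above drains the full buffer while leaving a fresh, pre-sized `Vec` in its place, then flushes whatever remains after the loop. A standalone sketch of the same batching loop (toy types, not the dump code itself):

```rust
// Toy version of the dump import's batching loop.
fn apply_batch(batch: Vec<u32>) {
    println!("applying batch of {}: {:?}", batch.len(), batch);
}

fn main() {
    let document_batch_size = 3;
    let mut values: Vec<u32> = Vec::with_capacity(document_batch_size);

    for document in 0..8 {
        values.push(document);
        // If the buffer is full, swap in an empty pre-sized Vec and apply the batch.
        if values.len() == document_batch_size {
            let batch = std::mem::replace(&mut values, Vec::with_capacity(document_batch_size));
            apply_batch(batch);
        }
    }
    // Apply documents remaining in the buffer.
    if !values.is_empty() {
        apply_batch(values);
    }
}
```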
```diff
@@ -289,7 +289,6 @@ fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, dir_path: &
 /// Write error with a context.
 fn fail_dump_process<E: std::error::Error>(data: &web::Data<Data>, dump_info: DumpInfo, context: &str, error: E) {
     let error_message = format!("{}; {}", context, error);
-
     error!("Something went wrong during dump process: {}", &error_message);
     data.set_current_dump_info(dump_info.with_error(Error::dump_failed(error_message).into()))
 }
```
```diff
@@ -405,7 +404,7 @@ pub fn init_dump_process(data: &web::Data<Data>, dumps_dir: &Path) -> Result<Dum
     let dumps_dir = dumps_dir.to_path_buf();
     let info_cloned = info.clone();

     // run dump process in a new thread
-    thread::spawn(move ||
+    thread::spawn(move ||
         dump_process(data, dumps_dir, info_cloned)
     );
```

```diff
@@ -132,7 +132,7 @@ async fn get_all_documents(
     let limit = params.limit.unwrap_or(20);
     let index_uid = &path.index_uid;
     let reader = data.db.main_read_txn()?;

     let documents = get_all_documents_sync(
         &data,
         &reader,
```
```diff
@@ -145,15 +145,6 @@ async fn get_all_documents(
     Ok(HttpResponse::Ok().json(documents))
 }

-fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
-    for key in document.keys() {
-        if key.to_lowercase().contains("id") {
-            return Some(key.to_string());
-        }
-    }
-    None
-}
-
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 struct UpdateDocumentsQuery {
```
```diff
@@ -168,26 +159,6 @@ async fn update_multiple_documents(
     is_partial: bool,
 ) -> Result<HttpResponse, ResponseError> {
     let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let reader = data.db.main_read_txn()?;
-
-        let mut schema = index
-            .main
-            .schema(&reader)?
-            .ok_or(meilisearch_core::Error::SchemaMissing)?;
-
-        if schema.primary_key().is_none() {
-            let id = match &params.primary_key {
-                Some(id) => id.to_string(),
-                None => body
-                    .first()
-                    .and_then(find_primary_key)
-                    .ok_or(meilisearch_core::Error::MissingPrimaryKey)?,
-            };
-
-            schema.set_primary_key(&id).map_err(Error::bad_request)?;
-
-            data.db.main_write(|w| index.main.put_schema(w, &schema))?;
-        }
-
         let mut document_addition = if is_partial {
             index.documents_partial_addition()
```
```diff
@@ -195,6 +166,27 @@ async fn update_multiple_documents(
             index.documents_addition()
         };

+        // Return an early error if primary key is already set, otherwise, try to set it up in the
+        // update later.
+        let reader = data.db.main_read_txn()?;
+        let schema = index
+            .main
+            .schema(&reader)?
+            .ok_or(meilisearch_core::Error::SchemaMissing)?;
+
+        match (params.into_inner().primary_key, schema.primary_key()) {
+            (Some(_), Some(_)) => return Err(meilisearch_schema::Error::PrimaryKeyAlreadyPresent)?,
+            (Some(key), None) => document_addition.set_primary_key(key),
+            (None, None) => {
+                let key = body
+                    .first()
+                    .and_then(find_primary_key)
+                    .ok_or(meilisearch_core::Error::MissingPrimaryKey)?;
+                document_addition.set_primary_key(key);
+            }
+            (None, Some(_)) => ()
+        }
+
         for document in body.into_inner() {
             document_addition.update_document(document);
         }
```
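This four-way match is the heart of the fix: the route now only *decides* the primary key and hands it to the update via `set_primary_key`; the actual schema write happens later, inside the update's own transaction. A standalone sketch of the same decision table (plain types standing in for MeiliSearch's; note it uses `HashMap` instead of `IndexMap`, so key iteration order is not guaranteed the way it is in the real code):

```rust
use std::collections::HashMap;

type Document = HashMap<String, String>;

#[derive(Debug, PartialEq)]
enum PkError {
    PrimaryKeyAlreadyPresent,
    MissingPrimaryKey,
}

// Same heuristic as the route: the first attribute whose name contains "id".
fn find_primary_key(document: &Document) -> Option<String> {
    document
        .keys()
        .find(|key| key.to_lowercase().contains("id"))
        .cloned()
}

/// Mirror of the route's match: decide which primary key (if any) the update
/// should set, given the request parameter and the current schema.
fn resolve_primary_key(
    param: Option<String>,
    schema_key: Option<&str>,
    first_doc: Option<&Document>,
) -> Result<Option<String>, PkError> {
    match (param, schema_key) {
        // Explicit key while the schema already has one: early error.
        (Some(_), Some(_)) => Err(PkError::PrimaryKeyAlreadyPresent),
        // Explicit key, none in the schema: set it in the update.
        (Some(key), None) => Ok(Some(key)),
        // No key anywhere: guess from the first document, or fail.
        (None, None) => Ok(Some(
            first_doc
                .and_then(find_primary_key)
                .ok_or(PkError::MissingPrimaryKey)?,
        )),
        // The schema already has a key: nothing to set.
        (None, Some(_)) => Ok(None),
    }
}

fn main() {
    let mut doc = Document::new();
    doc.insert("product_id".into(), "42".into());

    // No explicit key, empty schema: the heuristic picks "product_id".
    assert_eq!(
        resolve_primary_key(None, None, Some(&doc)),
        Ok(Some("product_id".into()))
    );
    // Explicit key while the schema already has one: rejected up front.
    assert_eq!(
        resolve_primary_key(Some("sku".into()), Some("product_id"), None),
        Err(PkError::PrimaryKeyAlreadyPresent)
    );
}
```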
```diff
@@ -204,6 +196,15 @@ async fn update_multiple_documents(
     return Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)));
 }

+fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
+    for key in document.keys() {
+        if key.to_lowercase().contains("id") {
+            return Some(key.to_string());
+        }
+    }
+    None
+}
+
 #[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
 async fn add_documents(
     data: web::Data<Data>,
```