Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-11 23:48:56 +01:00)
Merge #794
794: Check database version mismatch r=MarinPostma a=MarinPostma

Checks if the versions of the database and the engine are compatible. The database and the engine are compatible if they share the same major and minor version. The engine will refuse to start if there is a mismatch.

@bidoubiwa do we need to document this?

Co-authored-by: mpostma <postma.marin@protonmail.com>
This commit is contained in: commit 2775aeb6ac
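For reference, the compatibility rule in this PR compares only the major and minor components of the two versions; the patch component is ignored. Below is a minimal stand-alone sketch of that rule with illustrative helper names that are not part of the engine; the real implementation is the version_guard function shown in the diff further down.

// Stand-alone sketch of the compatibility rule, outside MeiliSearch itself.
// `parse_major_minor` and `is_compatible` are illustrative names, not engine APIs.
fn parse_major_minor(version: &str) -> Option<(u64, u64)> {
    let mut parts = version.trim().split('.');
    let major = parts.next()?.parse().ok()?;
    let minor = parts.next()?.parse().ok()?;
    Some((major, minor))
}

fn is_compatible(database_version: &str, engine_version: &str) -> bool {
    // Compatible when major and minor match; the patch component is ignored.
    match (parse_major_minor(database_version), parse_major_minor(engine_version)) {
        (Some(db), Some(engine)) => db == engine,
        _ => false,
    }
}

fn main() {
    assert!(is_compatible("0.13.0", "0.13.2"));  // same major.minor: the engine starts
    assert!(!is_compatible("0.12.0", "0.13.0")); // minor differs: the engine refuses to start
}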
@@ -1,6 +1,7 @@
 ## v0.13.0 (unreleased)

 - placeholder search (#771)
+- Add database version mismatch check (#794)

 ## v0.12.0
@@ -3,13 +3,15 @@ use std::fs::File;
 use std::path::Path;
 use std::sync::{Arc, RwLock};
 use std::{fs, thread};
+use std::io::{Read, Write, ErrorKind};

 use chrono::{DateTime, Utc};
 use crossbeam_channel::{Receiver, Sender};
-use heed::types::{Str, Unit, SerdeBincode};
 use heed::CompactionOption;
+use heed::types::{Str, Unit, SerdeBincode};
 use log::{debug, error};
 use meilisearch_schema::Schema;
+use regex::Regex;

 use crate::{store, update, Index, MResult, Error};
@@ -161,11 +163,69 @@ fn update_awaiter(
     Ok(())
 }

+/// Ensures that the Meilisearch version is compatible with the database; returns an error if the versions mismatch.
+/// If create is set to true, a VERSION file is created with the current version.
+fn version_guard(path: &Path, create: bool) -> MResult<()> {
+    let current_version_major = env!("CARGO_PKG_VERSION_MAJOR");
+    let current_version_minor = env!("CARGO_PKG_VERSION_MINOR");
+    let current_version_patch = env!("CARGO_PKG_VERSION_PATCH");
+    let version_path = path.join("VERSION");
+
+    match File::open(&version_path) {
+        Ok(mut file) => {
+            let mut version = String::new();
+            file.read_to_string(&mut version)?;
+            // Matches strings like XX.XX.XX
+            let re = Regex::new(r"(\d+).(\d+).(\d+)").unwrap();
+
+            // Make sure there is a result
+            let version = re
+                .captures_iter(&version)
+                .next()
+                .ok_or(Error::VersionMismatch("bad VERSION file".to_string()))?;
+            // the first capture is always the complete match; the groups are safe to unwrap because we have a match
+            let version_major = version.get(1).unwrap().as_str();
+            let version_minor = version.get(2).unwrap().as_str();
+
+            if version_major != current_version_major || version_minor != current_version_minor {
+                return Err(Error::VersionMismatch(format!("{}.{}.XX", version_major, version_minor)));
+            }
+        }
+        Err(error) => {
+            match error.kind() {
+                ErrorKind::NotFound => {
+                    if create {
+                        // when no version file is found, and we've been told to create one,
+                        // create a new file with the current version in it.
+                        let mut version_file = File::create(&version_path)?;
+                        version_file.write_all(format!("{}.{}.{}",
+                            current_version_major,
+                            current_version_minor,
+                            current_version_patch).as_bytes())?;
+                    } else {
+                        // when no version file is found and we were not told to create one, this
+                        // means that the version is inferior to the one this feature was added in.
+                        return Err(Error::VersionMismatch(format!("<0.12.0")));
+                    }
+                }
+                _ => return Err(error.into())
+            }
+        }
+    }
+    Ok(())
+}
+
 impl Database {
     pub fn open_or_create(path: impl AsRef<Path>, options: DatabaseOptions) -> MResult<Database> {
         let main_path = path.as_ref().join("main");
         let update_path = path.as_ref().join("update");

+        // create the db directory
+        fs::create_dir_all(&path)?;
+
+        // create the VERSION file only if the main db wasn't created before (first run)
+        version_guard(path.as_ref(), !main_path.exists() && !update_path.exists())?;
+
         fs::create_dir_all(&main_path)?;
         let env = heed::EnvOpenOptions::new()
             .map_size(options.main_map_size)
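The guard above hinges on a plain-text VERSION file at the root of the database directory: it is written once on the first run and read back on every later start, where only the major and minor parts are compared against the running engine. Below is a small stand-alone sketch of that lifecycle using only the standard library; the demo directory name and the flow are illustrative, not the engine's own code.

use std::fs::{self, File};
use std::io::{Read, Write};

fn main() -> std::io::Result<()> {
    // Illustrative throw-away directory standing in for the database path.
    let dir = std::path::Path::new("version-guard-demo");
    fs::create_dir_all(dir)?;
    let version_path = dir.join("VERSION");

    // First run: no VERSION file yet, so record the current crate version.
    if !version_path.exists() {
        let mut file = File::create(&version_path)?;
        file.write_all(env!("CARGO_PKG_VERSION").as_bytes())?;
    }

    // Later runs: read it back and compare only the major and minor components.
    let mut stored = String::new();
    File::open(&version_path)?.read_to_string(&mut stored)?;
    let stored: Vec<&str> = stored.trim().split('.').collect();
    let compatible = stored.get(0).copied() == Some(env!("CARGO_PKG_VERSION_MAJOR"))
        && stored.get(1).copied() == Some(env!("CARGO_PKG_VERSION_MINOR"));
    println!("stored version {:?}, compatible with this build: {}", stored, compatible);

    fs::remove_dir_all(dir)?;
    Ok(())
}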
@@ -15,22 +15,23 @@ pub type MResult<T> = Result<T, Error>;

 #[derive(Debug)]
 pub enum Error {
-    Io(io::Error),
-    IndexAlreadyExists,
-    MissingPrimaryKey,
-    SchemaMissing,
-    WordIndexMissing,
-    MissingDocumentId,
-    MaxFieldsLimitExceeded,
-    Schema(meilisearch_schema::Error),
-    Heed(heed::Error),
-    Fst(fst::Error),
-    SerdeJson(SerdeJsonError),
     Bincode(bincode::Error),
-    Serializer(SerializerError),
     Deserializer(DeserializerError),
-    FilterParseError(PestError<Rule>),
     FacetError(FacetError),
+    FilterParseError(PestError<Rule>),
+    Fst(fst::Error),
+    Heed(heed::Error),
+    IndexAlreadyExists,
+    Io(io::Error),
+    MaxFieldsLimitExceeded,
+    MissingDocumentId,
+    MissingPrimaryKey,
+    Schema(meilisearch_schema::Error),
+    SchemaMissing,
+    SerdeJson(SerdeJsonError),
+    Serializer(SerializerError),
+    VersionMismatch(String),
+    WordIndexMissing,
 }

 impl ErrorCode for Error {
@@ -53,6 +54,7 @@ impl ErrorCode for Error {
             | Bincode(_)
             | Serializer(_)
             | Deserializer(_)
+            | VersionMismatch(_)
             | Io(_) => Code::Internal,
         }
     }
@@ -141,22 +143,27 @@ impl fmt::Display for Error {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         use self::Error::*;
         match self {
-            Io(e) => write!(f, "{}", e),
-            IndexAlreadyExists => write!(f, "index already exists"),
-            MissingPrimaryKey => write!(f, "schema cannot be built without a primary key"),
-            SchemaMissing => write!(f, "this index does not have a schema"),
-            WordIndexMissing => write!(f, "this index does not have a word index"),
-            MissingDocumentId => write!(f, "document id is missing"),
-            MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
-            Schema(e) => write!(f, "schema error; {}", e),
-            Heed(e) => write!(f, "heed error; {}", e),
-            Fst(e) => write!(f, "fst error; {}", e),
-            SerdeJson(e) => write!(f, "serde json error; {}", e),
             Bincode(e) => write!(f, "bincode error; {}", e),
-            Serializer(e) => write!(f, "serializer error; {}", e),
             Deserializer(e) => write!(f, "deserializer error; {}", e),
-            FilterParseError(e) => write!(f, "error parsing filter; {}", e),
             FacetError(e) => write!(f, "error processing facet filter: {}", e),
+            FilterParseError(e) => write!(f, "error parsing filter; {}", e),
+            Fst(e) => write!(f, "fst error; {}", e),
+            Heed(e) => write!(f, "heed error; {}", e),
+            IndexAlreadyExists => write!(f, "index already exists"),
+            Io(e) => write!(f, "{}", e),
+            MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
+            MissingDocumentId => write!(f, "document id is missing"),
+            MissingPrimaryKey => write!(f, "schema cannot be built without a primary key"),
+            Schema(e) => write!(f, "schema error; {}", e),
+            SchemaMissing => write!(f, "this index does not have a schema"),
+            SerdeJson(e) => write!(f, "serde json error; {}", e),
+            Serializer(e) => write!(f, "serializer error; {}", e),
+            VersionMismatch(version) => write!(f, "Cannot open database, expected MeiliSearch engine version: {}, current engine version: {}.{}.{}",
+                version,
+                env!("CARGO_PKG_VERSION_MAJOR"),
+                env!("CARGO_PKG_VERSION_MINOR"),
+                env!("CARGO_PKG_VERSION_PATCH")),
+            WordIndexMissing => write!(f, "this index does not have a word index"),
         }
     }
 }
@@ -1,3 +1,4 @@
+use std::error::Error;
 use std::ops::Deref;
 use std::sync::Arc;
@@ -55,7 +56,7 @@ impl ApiKeys {
 }

 impl Data {
-    pub fn new(opt: Opt) -> Data {
+    pub fn new(opt: Opt) -> Result<Data, Box<dyn Error>> {
         let db_path = opt.db_path.clone();
         let server_pid = sysinfo::get_current_pid().unwrap();
@@ -66,7 +67,7 @@ impl Data {

         let http_payload_size_limit = opt.http_payload_size_limit;

-        let db = Arc::new(Database::open_or_create(opt.db_path, db_opt).unwrap());
+        let db = Arc::new(Database::open_or_create(opt.db_path, db_opt)?);

         let mut api_keys = ApiKeys {
             master: opt.master_key,
@@ -93,6 +94,6 @@ impl Data {
             index_update_callback(&index_uid, &callback_context, status);
         }));

-        data
+        Ok(data)
     }
 }
@@ -51,7 +51,7 @@ async fn main() -> Result<(), MainError> {
         _ => unreachable!(),
     }

-    let data = Data::new(opt.clone());
+    let data = Data::new(opt.clone())?;

     if !opt.no_analytics {
         let analytics_data = data.clone();
@@ -50,7 +50,7 @@ impl Server {
             ..Opt::default()
         };

-        let data = Data::new(opt.clone());
+        let data = Data::new(opt.clone()).unwrap();

         Server {
             uid: uid.to_string(),