Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-07-22 06:11:01 +00:00.
Compare commits
73 Commits
Commit SHAs:

4ba5e22f64, a8ab15d65d, 39cf1931ae, bbb6771625, e9f9f270e1, 190b78b7be, 257f9fb2b2, d35a104ad3, 9bae7a35bf, 33c7c5a7e3,
91363daeaa, f9ab85adbe, 9dbf43d3e7, 772f4d6671, 1b57218739, 8767269b47, baceaed582, 62a28bc2a1, f83caa6c40, 53b1483e71,
a0eafea200, 10dace305d, 1eace79f77, e6033e174d, f1925b8f71, 834f3cc192, e049aead16, 0a9c9670e7, 1744dcebfe, 29712916e6,
4d2783bb04, 50f0fbb05c, 5a842ec94a, 372680e2ab, 6465a3f549, 690eab4a25, dc2e5ceed2, 1639a7338d, ac7226bb27, 086020e543,
452d456fad, f741942226, a27399cf65, 29b8810db8, a5a47911d1, 7bf6a3d7b2, 0cabcb7c79, f359b64d59, 2f3ecab8d9, 17f71a1a55,
bfe3bb0eeb, 0a67248bfe, 2644f087d0, 91c8c7a2e3, 029abd3413, 726756bad4, 10c56d9919, 5f59f93804, 704defea78, eb240c8b60,
c3bcd7a410, 26124e6436, 3cd6f5c7ea, 7c646e031c, 0a2ca075d3, b406b6ee44, 726e867058, f4d918d22a, 5ef3a01b6c, 5a98f1f076,
0ca44b6a82, ae2de4d0c4, e47b4acd08
.github/dependabot.yml (new file, +6 lines)
@@ -0,0 +1,6 @@
+version: 2
+updates:
+  - package-ecosystem: "cargo"
+    directory: "/"
+    schedule:
+      interval: "monthly"
.github/workflows/coverage.yml (new file, +34 lines)
@@ -0,0 +1,34 @@
+---
+on:
+  pull_request:
+    types: [review_requested, ready_for_review]
+
+name: Execute code coverage
+
+jobs:
+  nightly-coverage:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly
+          override: true
+      - uses: actions-rs/cargo@v1
+        with:
+          command: clean
+      - uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --all-features --no-fail-fast
+        env:
+          CARGO_INCREMENTAL: "0"
+          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
+      - uses: actions-rs/grcov@v0.1
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v1
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          file: ${{ steps.coverage.outputs.report }}
+          yml: ./codecov.yml
+          fail_ci_if_error: true
@@ -1,6 +1,10 @@
+## v0.16.0
+
+- Automatically create index on document push if index doesn't exist (#914)
+- Sort displayedAttributes and facetDistribution (#946)
 
 ## v0.15.0
 
 - Dumps (#887)
 - Update actix-web dependency to 3.0.0 (#963)
 - Consider an empty query to be a placeholder search (#916)
 
@@ -10,6 +14,7 @@
 
 ## v0.14.0
 
 - Sort displayedAttributes (#943)
 - Fix facet distribution case (#797)
 - Snapshotting (#839)
+- Fix bucket-sort unwrap bug (#915)
Cargo.lock (generated, 493 lines changed): file diff suppressed because it is too large.
@@ -60,6 +60,10 @@ meilisearch
 docker run -p 7700:7700 -v $(pwd)/data.ms:/data.ms getmeili/meilisearch
 ```
 
+#### Try MeiliSearch in our Sandbox
+
+Create a MeiliSearch instance in [MeiliSearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 72 hours.
+
 #### Run on Digital Ocean
 
 [](https://marketplace.digitalocean.com/apps/meilisearch?action=deploy&refcode=7c67bd97e101)
@@ -1 +1 @@
-_datas in movies.csv are from https://www.themoviedb.org/_
+_datas in movies.json are from https://www.themoviedb.org/_
Two further file diffs suppressed because they are too large.
@@ -1,10 +0,0 @@
-{
-    "searchableAttributes": ["title", "overview"],
-    "displayedAttributes": [
-        "id",
-        "title",
-        "overview",
-        "release_date",
-        "poster"
-    ]
-}
@@ -1,6 +1,6 @@
 [package]
 name = "meilisearch-core"
-version = "0.15.0"
+version = "0.16.0"
 license = "MIT"
 authors = ["Kerollmops <clement@meilisearch.com>"]
 edition = "2018"
@@ -9,43 +9,43 @@ edition = "2018"
 arc-swap = "0.4.5"
 bincode = "1.2.1"
 byteorder = "1.3.4"
-chrono = { version = "0.4.11", features = ["serde"] }
+chrono = { version = "0.4.19", features = ["serde"] }
 compact_arena = "0.4.0"
 cow-utils = "0.1.2"
-crossbeam-channel = "0.4.2"
+crossbeam-channel = "0.5.0"
 deunicode = "1.1.0"
 either = "1.5.3"
 env_logger = "0.7.1"
 fst = "0.4.4"
-hashbrown = { version = "0.7.1", features = ["serde"] }
+hashbrown = { version = "0.9.0", features = ["serde"] }
 heed = "0.8.0"
 indexmap = { version = "1.3.2", features = ["serde-1"] }
 intervaltree = "0.2.5"
 itertools = "0.9.0"
 levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] }
 log = "0.4.8"
-meilisearch-error = { path = "../meilisearch-error", version = "0.15.0" }
-meilisearch-schema = { path = "../meilisearch-schema", version = "0.15.0" }
-meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.15.0" }
-meilisearch-types = { path = "../meilisearch-types", version = "0.15.0" }
+meilisearch-error = { path = "../meilisearch-error", version = "0.16.0" }
+meilisearch-schema = { path = "../meilisearch-schema", version = "0.16.0" }
+meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.16.0" }
+meilisearch-types = { path = "../meilisearch-types", version = "0.16.0" }
 once_cell = "1.3.1"
-ordered-float = { version = "1.0.2", features = ["serde"] }
+ordered-float = { version = "2.0.0", features = ["serde"] }
 pest = { git = "https://github.com/pest-parser/pest.git", rev = "51fd1d49f1041f7839975664ef71fe15c7dcaf67" }
 pest_derive = "2.0"
-regex = "1.3.6"
+regex = "1.4.1"
 sdset = "0.4.0"
 serde = { version = "1.0.105", features = ["derive"] }
-serde_json = { version = "1.0.50", features = ["preserve_order"] }
+serde_json = { version = "1.0.59", features = ["preserve_order"] }
 slice-group-by = "0.2.6"
 unicase = "2.6.0"
 zerocopy = "0.3.0"
 
 [dev-dependencies]
-assert_matches = "1.3.0"
+assert_matches = "1.4.0"
 criterion = "0.3.1"
 csv = "1.1.3"
 rustyline = { version = "6.0.0", default-features = false }
-structopt = "0.3.12"
+structopt = "0.3.20"
 tempfile = "3.1.0"
 termcolor = "1.1.0"
@@ -98,7 +98,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
         let bench_name = BenchmarkId::from_parameter(format!("{:?}", query));
         group.bench_with_input(bench_name, &query, |b, query| b.iter(|| {
             let builder = index.query_builder();
-            builder.query(&reader, query, 0..20).unwrap();
+            builder.query(&reader, Some(*query), 0..20).unwrap();
         }));
     }
     group.finish();
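This signature change lines up with the v0.15.0 changelog entry that treats an empty query as a placeholder search: the query text is now passed as an `Option`, so `None` can mean "match everything". A minimal sketch of the idea, with a hypothetical `search` helper standing in for the real `QueryBuilder::query` API:

```rust
// Hypothetical stand-in for QueryBuilder::query: `None` triggers a
// placeholder search (no text filtering), `Some(q)` runs a normal query.
fn search(query: Option<&str>, docs: &[&str]) -> Vec<String> {
    match query {
        // Placeholder search: every document matches.
        None => docs.iter().map(|d| d.to_string()).collect(),
        // Regular search: naive substring matching, for illustration only.
        Some(q) => docs
            .iter()
            .filter(|d| d.contains(q))
            .map(|d| d.to_string())
            .collect(),
    }
}

fn main() {
    let docs = ["Carol", "Wonderwall", "Ascenseur pour l'échafaud"];
    assert_eq!(search(None, &docs).len(), 3); // placeholder: all documents
    assert_eq!(search(Some("Wonder"), &docs).len(), 1); // normal query
}
```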
@@ -1,4 +1,5 @@
 use std::collections::hash_map::{Entry, HashMap};
+use std::collections::BTreeMap;
 use std::fs::File;
 use std::path::Path;
 use std::sync::{Arc, RwLock};
@@ -27,7 +28,6 @@ pub type MainReader = heed::RoTxn<MainT>;
 pub type UpdateWriter<'a> = heed::RwTxn<'a, UpdateT>;
 pub type UpdateReader = heed::RoTxn<UpdateT>;
 
-const UNHEALTHY_KEY: &str = "_is_unhealthy";
 const LAST_UPDATE_KEY: &str = "last-update";
 
 pub struct MainT;
@@ -193,9 +193,9 @@ fn version_guard(path: &Path, create: bool) -> MResult<(u32, u32, u32)> {
         Err(Error::VersionMismatch(format!("{}.{}.XX", version_major, version_minor)))
     } else {
         Ok((
-            version_major.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-            version_minor.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-            version_patch.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?
+            version_major.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+            version_minor.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+            version_patch.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?
         ))
     }
 }
@@ -212,9 +212,9 @@ fn version_guard(path: &Path, create: bool) -> MResult<(u32, u32, u32)> {
             current_version_patch).as_bytes())?;
 
         Ok((
-            current_version_major.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-            current_version_minor.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
-            current_version_patch.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?
+            current_version_major.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+            current_version_minor.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
+            current_version_patch.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?
        ))
    } else {
        // when no version file is found and we were not told to create one, this
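Both hunks make the same mechanical cleanup: `or_else(|e| Err(...))` re-wraps the error by constructing a whole new `Result`, whereas `map_err` transforms only the error variant and is the form clippy suggests. A small self-contained illustration:

```rust
fn parse_version(s: &str) -> Result<u32, String> {
    // Verbose form, equivalent but indirect:
    //   s.parse().or_else(|e| Err(format!("error parsing database version: {}", e)))
    // Idiomatic form: map_err only touches the error variant.
    s.parse()
        .map_err(|e| format!("error parsing database version: {}", e))
}

fn main() {
    assert_eq!(parse_version("12"), Ok(12));
    assert!(parse_version("twelve").unwrap_err().contains("error parsing"));
}
```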
@@ -532,23 +532,6 @@ impl Database {
         Ok(())
     }
 
-    pub fn set_healthy(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
-        let common_store = self.common_store();
-        common_store.delete::<_, Str>(writer, UNHEALTHY_KEY)?;
-        Ok(())
-    }
-
-    pub fn set_unhealthy(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
-        let common_store = self.common_store();
-        common_store.put::<_, Str, Unit>(writer, UNHEALTHY_KEY, &())?;
-        Ok(())
-    }
-
-    pub fn get_health(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<()>> {
-        let common_store = self.common_store();
-        Ok(common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY)?)
-    }
-
     pub fn compute_stats(&self, writer: &mut MainWriter, index_uid: &str) -> MResult<()> {
         let index = match self.open_index(&index_uid) {
             Some(index) => index,
@@ -577,7 +560,7 @@ impl Database {
         }
 
         // convert attributes to their names
-        let frequency: HashMap<_, _> = fields_frequency
+        let frequency: BTreeMap<_, _> = fields_frequency
             .into_iter()
             .filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c)))
             .collect();
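This is one of several `HashMap`/`HashSet` to `BTreeMap`/`BTreeSet` swaps in this compare (fields frequency here; `displayedAttributes`, `FreqsMap`, and `fields_distribution` below). The motivation matches the changelog's "Sort displayedAttributes and facetDistribution" entry: serde serializes a `BTreeMap` in sorted key order, so JSON responses become deterministic. A minimal demonstration (assumes `serde_json` as a dependency):

```rust
use std::collections::{BTreeMap, HashMap};

fn main() {
    let pairs = [("title", 10_usize), ("overview", 7), ("id", 3)];

    // HashMap iteration (and thus serialization) order is unspecified.
    let _hashed: HashMap<_, _> = pairs.iter().cloned().collect();

    // BTreeMap keeps keys sorted, so JSON output is stable: id, overview, title.
    let sorted: BTreeMap<_, _> = pairs.iter().cloned().collect();
    let json = serde_json::to_string(&sorted).unwrap();
    assert_eq!(json, r#"{"id":3,"overview":7,"title":10}"#);
}
```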
@@ -1,4 +1,4 @@
-use std::collections::{BTreeMap, BTreeSet, HashSet};
+use std::collections::{BTreeMap, BTreeSet};
 use std::str::FromStr;
 use std::iter::IntoIterator;
 
@@ -23,7 +23,7 @@ pub struct Settings {
     #[serde(default, deserialize_with = "deserialize_some")]
     pub searchable_attributes: Option<Option<Vec<String>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
-    pub displayed_attributes: Option<Option<HashSet<String>>>,
+    pub displayed_attributes: Option<Option<BTreeSet<String>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
     pub stop_words: Option<Option<BTreeSet<String>>>,
     #[serde(default, deserialize_with = "deserialize_some")]
@@ -161,7 +161,7 @@ pub struct SettingsUpdate {
     pub distinct_attribute: UpdateState<String>,
     pub primary_key: UpdateState<String>,
     pub searchable_attributes: UpdateState<Vec<String>>,
-    pub displayed_attributes: UpdateState<HashSet<String>>,
+    pub displayed_attributes: UpdateState<BTreeSet<String>>,
     pub stop_words: UpdateState<BTreeSet<String>>,
     pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
     pub attributes_for_faceting: UpdateState<Vec<String>>,
@@ -1,5 +1,5 @@
 use std::borrow::Cow;
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 
 use chrono::{DateTime, Utc};
 use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str, CowSlice};
@@ -31,7 +31,7 @@ const SYNONYMS_KEY: &str = "synonyms";
 const UPDATED_AT_KEY: &str = "updated-at";
 const WORDS_KEY: &str = "words";
 
-pub type FreqsMap = HashMap<String, usize>;
+pub type FreqsMap = BTreeMap<String, usize>;
 type SerdeFreqsMap = SerdeBincode<FreqsMap>;
 type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
@@ -1,6 +1,6 @@
 [package]
 name = "meilisearch-error"
-version = "0.15.0"
+version = "0.16.0"
 authors = ["marin <postma.marin@protonmail.com>"]
 edition = "2018"
@@ -1,7 +1,7 @@
 [package]
 name = "meilisearch-http"
 description = "MeiliSearch HTTP server"
-version = "0.15.0"
+version = "0.16.0"
 license = "MIT"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
@@ -17,43 +17,43 @@ path = "src/main.rs"
 default = ["sentry"]
 
 [dependencies]
-actix-cors = "0.3"
+actix-cors = "0.4.1"
 actix-http = "2"
 actix-rt = "1"
 actix-service = "1.0.6"
-actix-web = { version = "3", features = ["rustls"] }
+actix-web = { version = "3.1.0", features = ["rustls"] }
 bytes = "0.5.4"
-chrono = { version = "0.4.11", features = ["serde"] }
-crossbeam-channel = "0.4.2"
+chrono = { version = "0.4.19", features = ["serde"] }
+crossbeam-channel = "0.5.0"
 env_logger = "0.7.1"
-flate2 = "1.0.16"
-futures = "0.3.4"
-http = "0.1.19"
+flate2 = "1.0.18"
+futures = "0.3.6"
+http = "0.2.1"
 indexmap = { version = "1.3.2", features = ["serde-1"] }
 log = "0.4.8"
 main_error = "0.1.0"
-meilisearch-core = { path = "../meilisearch-core", version = "0.15.0" }
-meilisearch-error = { path = "../meilisearch-error", version = "0.15.0" }
-meilisearch-schema = { path = "../meilisearch-schema", version = "0.15.0" }
-meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.15.0"}
+meilisearch-core = { path = "../meilisearch-core", version = "0.16.0" }
+meilisearch-error = { path = "../meilisearch-error", version = "0.16.0" }
+meilisearch-schema = { path = "../meilisearch-schema", version = "0.16.0" }
+meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.16.0"}
 mime = "0.3.16"
 once_cell = "1.4.1"
 rand = "0.7.3"
-regex = "1.3.6"
+regex = "1.4.1"
 rustls = "0.18"
 serde = { version = "1.0.105", features = ["derive"] }
-serde_json = { version = "1.0.50", features = ["preserve_order"] }
-serde_qs = "0.5.2"
-sha2 = "0.8.1"
+serde_json = { version = "1.0.59", features = ["preserve_order"] }
+serde_qs = "0.7.0"
+sha2 = "0.9.1"
 siphasher = "0.3.2"
 slice-group-by = "0.2.6"
-structopt = "0.3.12"
+structopt = "0.3.20"
 tar = "0.4.29"
 tempfile = "3.1.0"
 tokio = { version = "0.2.18", features = ["macros"] }
-ureq = { version = "0.12.0", features = ["tls"], default-features = false }
+ureq = { version = "1.5.1", features = ["tls"], default-features = false }
 walkdir = "2.3.1"
-whoami = "0.8.1"
+whoami = "0.9.0"
 
 [dependencies.sentry]
 version = "0.18.1"
@@ -203,7 +203,7 @@
         if (e.selectedIndex == -1) { return }
         var index = e.options[e.selectedIndex].value;
 
-        let theUrl = `${baseUrl}/indexes/${index}/search?q=${search.value}&attributesToHighlight=*`;
+        let theUrl = `${baseUrl}/indexes/${index}/search?q=${encodeURIComponent(search.value)}&attributesToHighlight=*`;
 
         if (lastRequest) { lastRequest.abort() }
         lastRequest = new XMLHttpRequest();
@@ -3,9 +3,10 @@ use std::ops::Deref;
 use std::path::PathBuf;
 use std::sync::Arc;
 
-use meilisearch_core::{Database, DatabaseOptions};
+use meilisearch_core::{Database, DatabaseOptions, Index};
 use sha2::Digest;
 
 use crate::error::{Error as MSError, ResponseError};
 use crate::index_update_callback;
 use crate::option::Opt;
 
@@ -26,7 +27,7 @@ impl Deref for Data {
 pub struct DataInner {
     pub db: Arc<Database>,
     pub db_path: String,
-    pub dumps_folder: PathBuf,
+    pub dumps_dir: PathBuf,
     pub dump_batch_size: usize,
     pub api_keys: ApiKeys,
     pub server_pid: u32,
@@ -60,7 +61,7 @@ impl ApiKeys {
 impl Data {
     pub fn new(opt: Opt) -> Result<Data, Box<dyn Error>> {
         let db_path = opt.db_path.clone();
-        let dumps_folder = opt.dumps_folder.clone();
+        let dumps_dir = opt.dumps_dir.clone();
         let dump_batch_size = opt.dump_batch_size;
         let server_pid = std::process::id();
 
@@ -84,7 +85,7 @@ impl Data {
         let inner_data = DataInner {
             db: db.clone(),
             db_path,
-            dumps_folder,
+            dumps_dir,
             dump_batch_size,
             api_keys,
             server_pid,
@@ -102,4 +103,60 @@ impl Data {
 
         Ok(data)
     }
+
+    fn create_index(&self, uid: &str) -> Result<Index, ResponseError> {
+        if !uid
+            .chars()
+            .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
+        {
+            return Err(MSError::InvalidIndexUid.into());
+        }
+
+        let created_index = self.db.create_index(&uid).map_err(|e| match e {
+            meilisearch_core::Error::IndexAlreadyExists => e.into(),
+            _ => ResponseError::from(MSError::create_index(e)),
+        })?;
+
+        self.db.main_write::<_, _, ResponseError>(|mut writer| {
+            created_index.main.put_name(&mut writer, uid)?;
+
+            created_index
+                .main
+                .created_at(&writer)?
+                .ok_or(MSError::internal("Impossible to read created at"))?;
+
+            created_index
+                .main
+                .updated_at(&writer)?
+                .ok_or(MSError::internal("Impossible to read updated at"))?;
+            Ok(())
+        })?;
+
+        Ok(created_index)
+    }
+
+    pub fn get_or_create_index<F, R>(&self, uid: &str, f: F) -> Result<R, ResponseError>
+    where
+        F: FnOnce(&Index) -> Result<R, ResponseError>,
+    {
+        let mut index_has_been_created = false;
+
+        let index = match self.db.open_index(&uid) {
+            Some(index) => index,
+            None => {
+                index_has_been_created = true;
+                self.create_index(&uid)?
+            }
+        };
+
+        match f(&index) {
+            Ok(r) => Ok(r),
+            Err(err) => {
+                if index_has_been_created {
+                    let _ = self.db.delete_index(&uid);
+                }
+                Err(err)
+            }
+        }
+    }
 }
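`get_or_create_index` is the core of the auto-create behaviour (#914): it opens the index if it exists, otherwise creates it, runs the closure, and rolls the creation back if the closure fails. A simplified, self-contained model of that pattern (the `Db` type and its methods here are stand-ins, not the real MeiliSearch API):

```rust
use std::collections::HashMap;

// Minimal stand-in for the database: a set of named indexes.
struct Db {
    indexes: HashMap<String, Vec<String>>, // uid -> documents
}

impl Db {
    fn get_or_create_index<F, R>(&mut self, uid: &str, f: F) -> Result<R, String>
    where
        F: FnOnce(&mut Vec<String>) -> Result<R, String>,
    {
        let created = !self.indexes.contains_key(uid);
        let index = self.indexes.entry(uid.to_string()).or_default();

        match f(index) {
            Ok(r) => Ok(r),
            Err(err) => {
                // Roll back: only delete the index if we just created it.
                if created {
                    self.indexes.remove(uid);
                }
                Err(err)
            }
        }
    }
}

fn main() {
    let mut db = Db { indexes: HashMap::new() };

    // A failing closure on a fresh uid leaves no half-created index behind.
    let _ = db.get_or_create_index("movies", |_| Err::<(), _>("bad payload".into()));
    assert!(!db.indexes.contains_key("movies"));

    // A succeeding closure keeps the newly created index.
    db.get_or_create_index("movies", |docs| {
        docs.push("Carol".to_string());
        Ok(())
    })
    .unwrap();
    assert_eq!(db.indexes["movies"].len(), 1);
}
```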
@@ -13,10 +13,11 @@ use meilisearch_core::settings::Settings;
 use meilisearch_core::update::{apply_settings_update, apply_documents_addition};
 use once_cell::sync::Lazy;
 use serde::{Deserialize, Serialize};
+use serde_json::json;
 use tempfile::TempDir;
 
 use crate::Data;
-use crate::error::Error;
+use crate::error::{Error, ResponseError};
 use crate::helpers::compression;
 use crate::routes::index;
 use crate::routes::index::IndexResponse;
@@ -51,9 +52,9 @@ impl DumpMetadata {
         }
     }
 
-    /// Extract DumpMetadata from `metadata.json` file present at provided `folder_path`
-    fn from_path(folder_path: &Path) -> Result<Self, Error> {
-        let path = folder_path.join("metadata.json");
+    /// Extract DumpMetadata from `metadata.json` file present at provided `dir_path`
+    fn from_path(dir_path: &Path) -> Result<Self, Error> {
+        let path = dir_path.join("metadata.json");
         let file = File::open(path)?;
         let reader = std::io::BufReader::new(file);
         let metadata = serde_json::from_reader(reader)?;
@@ -61,9 +62,9 @@ impl DumpMetadata {
         Ok(metadata)
     }
 
-    /// Write DumpMetadata in `metadata.json` file at provided `folder_path`
-    fn to_path(&self, folder_path: &Path) -> Result<(), Error> {
-        let path = folder_path.join("metadata.json");
+    /// Write DumpMetadata in `metadata.json` file at provided `dir_path`
+    fn to_path(&self, dir_path: &Path) -> Result<(), Error> {
+        let path = dir_path.join("metadata.json");
         let file = File::create(path)?;
 
         serde_json::to_writer(file, &self)?;
@@ -72,9 +73,9 @@ impl DumpMetadata {
     }
 }
 
-/// Extract Settings from `settings.json` file present at provided `folder_path`
-fn settings_from_path(folder_path: &Path) -> Result<Settings, Error> {
-    let path = folder_path.join("settings.json");
+/// Extract Settings from `settings.json` file present at provided `dir_path`
+fn settings_from_path(dir_path: &Path) -> Result<Settings, Error> {
+    let path = dir_path.join("settings.json");
     let file = File::open(path)?;
     let reader = std::io::BufReader::new(file);
     let metadata = serde_json::from_reader(reader)?;
@@ -82,9 +83,9 @@ fn settings_from_path(folder_path: &Path) -> Result<Settings, Error> {
     Ok(metadata)
 }
 
-/// Write Settings in `settings.json` file at provided `folder_path`
-fn settings_to_path(settings: &Settings, folder_path: &Path) -> Result<(), Error> {
-    let path = folder_path.join("settings.json");
+/// Write Settings in `settings.json` file at provided `dir_path`
+fn settings_to_path(settings: &Settings, dir_path: &Path) -> Result<(), Error> {
+    let path = dir_path.join("settings.json");
     let file = File::create(path)?;
 
     serde_json::to_writer(file, settings)?;
@@ -95,7 +96,7 @@ fn settings_to_path(settings: &Settings, folder_path: &Path) -> Result<(), Error
 /// Import settings and documents of a dump with version `DumpVersion::V1` in specified index.
 fn import_index_v1(
     data: &Data,
-    dumps_folder: &Path,
+    dumps_dir: &Path,
     index_uid: &str,
     document_batch_size: usize,
     write_txn: &mut MainWriter,
@@ -107,12 +108,12 @@ fn import_index_v1(
         .open_index(index_uid)
         .ok_or(Error::index_not_found(index_uid))?;
 
-    // index folder path in dump folder
-    let index_path = &dumps_folder.join(index_uid);
+    // index dir path in dump dir
+    let index_path = &dumps_dir.join(index_uid);
 
     // extract `settings.json` file and import content
     let settings = settings_from_path(&index_path)?;
-    let settings = settings.to_update().or_else(|_e| Err(Error::dump_failed()))?;
+    let settings = settings.to_update().map_err(|e| Error::dump_failed(format!("importing settings for index {}; {}", index_uid, e)))?;
     apply_settings_update(write_txn, &index, settings)?;
 
     // create iterator over documents in `documents.jsonl` to make batch importation
@@ -199,17 +200,17 @@ pub fn import_dump(
 #[serde(rename_all = "snake_case")]
 pub enum DumpStatus {
     Done,
-    Processing,
-    DumpProcessFailed,
+    InProgress,
+    Failed,
 }
 
-#[derive(Debug, Serialize, Deserialize, Clone)]
+#[derive(Debug, Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct DumpInfo {
     pub uid: String,
     pub status: DumpStatus,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub error: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none", flatten)]
+    pub error: Option<serde_json::Value>,
 }
 
 impl DumpInfo {
@@ -217,15 +218,15 @@ impl DumpInfo {
         Self { uid, status, error: None }
     }
 
-    pub fn with_error(mut self, error: String) -> Self {
-        self.status = DumpStatus::DumpProcessFailed;
-        self.error = Some(error);
+    pub fn with_error(mut self, error: ResponseError) -> Self {
+        self.status = DumpStatus::Failed;
+        self.error = Some(json!(error));
 
         self
     }
 
     pub fn dump_already_in_progress(&self) -> bool {
-        self.status == DumpStatus::Processing
+        self.status == DumpStatus::InProgress
     }
 
     pub fn get_current() -> Option<Self> {
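The `error` field is now a flattened `serde_json::Value`, so the serialized error's own keys (message, errorCode, and so on) are merged directly into the `DumpInfo` object instead of nesting under an `"error"` key. A simplified model of that behaviour (the struct and values here are illustrative, not the exact MeiliSearch types):

```rust
use serde::Serialize;
use serde_json::json;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct DumpInfo {
    uid: String,
    status: String,
    // `flatten` splices the Value's keys into this object; a `None`
    // emits nothing (the real code also carries skip_serializing_if).
    #[serde(flatten)]
    error: Option<serde_json::Value>,
}

fn main() {
    let info = DumpInfo {
        uid: "20201015-120000123".into(),
        status: "failed".into(),
        error: Some(json!({ "message": "Dump process failed: compressing dump" })),
    };
    // Prints: {"uid":"20201015-120000123","status":"failed","message":"..."}
    println!("{}", serde_json::to_string(&info).unwrap());
}
```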
@@ -242,29 +243,29 @@ fn generate_uid() -> String {
     Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
 }
 
-/// Infer dumps_folder from dump_uid
-pub fn compressed_dumps_folder(dumps_folder: &Path, dump_uid: &str) -> PathBuf {
-    dumps_folder.join(format!("{}.tar.gz", dump_uid))
+/// Infer dumps_dir from dump_uid
+pub fn compressed_dumps_dir(dumps_dir: &Path, dump_uid: &str) -> PathBuf {
+    dumps_dir.join(format!("{}.dump", dump_uid))
 }
 
 /// Write metadata in dump
-fn dump_metadata(data: &web::Data<Data>, folder_path: &Path, indexes: Vec<IndexResponse>) -> Result<(), Error> {
+fn dump_metadata(data: &web::Data<Data>, dir_path: &Path, indexes: Vec<IndexResponse>) -> Result<(), Error> {
     let (db_major, db_minor, db_patch) = data.db.version();
     let metadata = DumpMetadata::new(indexes, format!("{}.{}.{}", db_major, db_minor, db_patch));
 
-    metadata.to_path(folder_path)
+    metadata.to_path(dir_path)
 }
 
 /// Export settings of provided index in dump
-fn dump_index_settings(data: &web::Data<Data>, reader: &MainReader, folder_path: &Path, index_uid: &str) -> Result<(), Error> {
+fn dump_index_settings(data: &web::Data<Data>, reader: &MainReader, dir_path: &Path, index_uid: &str) -> Result<(), Error> {
     let settings = crate::routes::setting::get_all_sync(data, reader, index_uid)?;
 
-    settings_to_path(&settings, folder_path)
+    settings_to_path(&settings, dir_path)
 }
 
 /// Export updates of provided index in dump
-fn dump_index_updates(data: &web::Data<Data>, reader: &UpdateReader, folder_path: &Path, index_uid: &str) -> Result<(), Error> {
-    let updates_path = folder_path.join("updates.jsonl");
+fn dump_index_updates(data: &web::Data<Data>, reader: &UpdateReader, dir_path: &Path, index_uid: &str) -> Result<(), Error> {
+    let updates_path = dir_path.join("updates.jsonl");
     let updates = crate::routes::index::get_all_updates_status_sync(data, reader, index_uid)?;
 
     let file = File::create(updates_path)?;
@@ -278,15 +279,15 @@ fn dump_index_updates(data: &web::Data<Data>, reader: &UpdateReader, folder_path
 }
 
 /// Export documents of provided index in dump
-fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, folder_path: &Path, index_uid: &str) -> Result<(), Error> {
-    let documents_path = folder_path.join("documents.jsonl");
+fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, dir_path: &Path, index_uid: &str) -> Result<(), Error> {
+    let documents_path = dir_path.join("documents.jsonl");
     let file = File::create(documents_path)?;
     let dump_batch_size = data.dump_batch_size;
 
     let mut offset = 0;
     loop {
         let documents = crate::routes::document::get_all_documents_sync(data, reader, index_uid, offset, dump_batch_size, None)?;
-        if documents.len() == 0 { break; } else { offset += dump_batch_size; }
+        if documents.is_empty() { break; } else { offset += dump_batch_size; }
 
         for document in documents {
             serde_json::to_writer(&file, &document)?;
@@ -299,14 +300,14 @@ fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, folder_path
 
 /// Write error with a context.
 fn fail_dump_process<E: std::error::Error>(dump_info: DumpInfo, context: &str, error: E) {
-    let error = format!("Something went wrong during dump process: {}; {}", context, error);
+    let error_message = format!("{}; {}", context, error);
 
-    error!("{}", &error);
-    dump_info.with_error(error).set_current();
+    error!("Something went wrong during dump process: {}", &error_message);
+    dump_info.with_error(Error::dump_failed(error_message).into()).set_current();
 }
 
 /// Main function of dump.
-fn dump_process(data: web::Data<Data>, dumps_folder: PathBuf, dump_info: DumpInfo) {
+fn dump_process(data: web::Data<Data>, dumps_dir: PathBuf, dump_info: DumpInfo) {
     // open read transaction on Update
     let update_reader = match data.db.update_read_txn() {
         Ok(r) => r,
@@ -379,8 +380,8 @@ fn dump_process(data: web::Data<Data>, dumps_folder: PathBuf, dump_info: DumpInf
         }
     }
 
-    // compress dump in a file named `{dump_uid}.tar.gz` in `dumps_folder`
-    if let Err(e) = crate::helpers::compression::to_tar_gz(&tmp_dir_path, &compressed_dumps_folder(&dumps_folder, &dump_info.uid)) {
+    // compress dump in a file named `{dump_uid}.dump` in `dumps_dir`
+    if let Err(e) = crate::helpers::compression::to_tar_gz(&tmp_dir_path, &compressed_dumps_dir(&dumps_dir, &dump_info.uid)) {
         fail_dump_process(dump_info, "compressing dump", e);
         return ;
     }
@@ -394,8 +395,8 @@ fn dump_process(data: web::Data<Data>, dumps_folder: PathBuf, dump_info: DumpInf
     resume.set_current();
 }
 
-pub fn init_dump_process(data: &web::Data<Data>, dumps_folder: &Path) -> Result<DumpInfo, Error> {
-    create_dir_all(dumps_folder).or(Err(Error::dump_failed()))?;
+pub fn init_dump_process(data: &web::Data<Data>, dumps_dir: &Path) -> Result<DumpInfo, Error> {
+    create_dir_all(dumps_dir).map_err(|e| Error::dump_failed(format!("creating temporary directory {}", e)))?;
 
     // check if a dump is already in progress
     if let Some(resume) = DumpInfo::get_current() {
@@ -407,17 +408,17 @@ pub fn init_dump_process(data: &web::Data<Data>, dumps_folder: &Path) -> Result<
     // generate a new dump info
     let info = DumpInfo::new(
         generate_uid(),
-        DumpStatus::Processing
+        DumpStatus::InProgress
     );
 
     info.set_current();
 
     let data = data.clone();
-    let dumps_folder = dumps_folder.to_path_buf();
+    let dumps_dir = dumps_dir.to_path_buf();
     let info_cloned = info.clone();
     // run dump process in a new thread
     thread::spawn(move ||
-        dump_process(data, dumps_folder, info_cloned)
+        dump_process(data, dumps_dir, info_cloned)
     );
 
     Ok(info)
@@ -5,7 +5,7 @@ use actix_http::ResponseBuilder;
 use actix_web as aweb;
 use actix_web::error::{JsonPayloadError, QueryPayloadError};
 use actix_web::http::StatusCode;
-use serde_json::json;
+use serde::ser::{Serialize, Serializer, SerializeStruct};
 
 use meilisearch_error::{ErrorCode, Code};
 
@@ -34,6 +34,51 @@ impl From<Error> for ResponseError {
     }
 }
 
+impl From<meilisearch_core::Error> for ResponseError {
+    fn from(err: meilisearch_core::Error) -> ResponseError {
+        ResponseError { inner: Box::new(err) }
+    }
+}
+
+impl From<meilisearch_schema::Error> for ResponseError {
+    fn from(err: meilisearch_schema::Error) -> ResponseError {
+        ResponseError { inner: Box::new(err) }
+    }
+}
+
+impl From<FacetCountError> for ResponseError {
+    fn from(err: FacetCountError) -> ResponseError {
+        ResponseError { inner: Box::new(err) }
+    }
+}
+
+impl Serialize for ResponseError {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let struct_name = "ResponseError";
+        let field_count = 4;
+
+        let mut state = serializer.serialize_struct(struct_name, field_count)?;
+        state.serialize_field("message", &self.to_string())?;
+        state.serialize_field("errorCode", &self.error_name())?;
+        state.serialize_field("errorType", &self.error_type())?;
+        state.serialize_field("errorLink", &self.error_url())?;
+        state.end()
+    }
+}
+
+impl aweb::error::ResponseError for ResponseError {
+    fn error_response(&self) -> aweb::HttpResponse {
+        ResponseBuilder::new(self.status_code()).json(&self)
+    }
+
+    fn status_code(&self) -> StatusCode {
+        self.http_status()
+    }
+}
+
 #[derive(Debug)]
 pub enum Error {
     BadParameter(String, String),
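Moving serialization into a dedicated `Serialize` impl means `error_response` and any other caller produce the same four-field JSON shape. A stripped-down model of the output (the struct, code, and link values here are placeholders, not the real `ResponseError` internals):

```rust
use serde::ser::{Serialize, SerializeStruct, Serializer};

struct ResponseError {
    message: String,
    code: &'static str,
}

impl Serialize for ResponseError {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Same four-field shape the HTTP handler returns.
        let mut state = serializer.serialize_struct("ResponseError", 4)?;
        state.serialize_field("message", &self.message)?;
        state.serialize_field("errorCode", &self.code)?;
        state.serialize_field("errorType", "invalid_request_error")?; // placeholder
        state.serialize_field("errorLink", "https://example.com/errors")?; // placeholder
        state.end()
    }
}

fn main() {
    let err = ResponseError { message: "Index movies not found".into(), code: "index_not_found" };
    // {"message":"Index movies not found","errorCode":"index_not_found",...}
    println!("{}", serde_json::to_string(&err).unwrap());
}
```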
@@ -54,7 +99,7 @@ pub enum Error {
     PayloadTooLarge,
     UnsupportedMediaType,
     DumpAlreadyInProgress,
-    DumpProcessFailed,
+    DumpProcessFailed(String),
 }
 
 impl error::Error for Error {}
@@ -81,7 +126,7 @@ impl ErrorCode for Error {
             PayloadTooLarge => Code::PayloadTooLarge,
             UnsupportedMediaType => Code::UnsupportedMediaType,
             DumpAlreadyInProgress => Code::DumpAlreadyInProgress,
-            DumpProcessFailed => Code::DumpProcessFailed,
+            DumpProcessFailed(_) => Code::DumpProcessFailed,
         }
     }
 }
@@ -189,8 +234,8 @@ impl Error {
         Error::DumpAlreadyInProgress
     }
 
-    pub fn dump_failed() -> Error {
-        Error::DumpProcessFailed
+    pub fn dump_failed(message: String) -> Error {
+        Error::DumpProcessFailed(message)
     }
 }
 
@@ -215,44 +260,17 @@ impl fmt::Display for Error {
             Self::PayloadTooLarge => f.write_str("Payload too large"),
             Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
             Self::DumpAlreadyInProgress => f.write_str("Another dump is already in progress"),
-            Self::DumpProcessFailed => f.write_str("Dump process failed"),
+            Self::DumpProcessFailed(message) => write!(f, "Dump process failed: {}", message),
         }
     }
 }
 
-impl aweb::error::ResponseError for ResponseError {
-    fn error_response(&self) -> aweb::HttpResponse {
-        ResponseBuilder::new(self.status_code()).json(json!({
-            "message": self.to_string(),
-            "errorCode": self.error_name(),
-            "errorType": self.error_type(),
-            "errorLink": self.error_url(),
-        }))
-    }
-
-    fn status_code(&self) -> StatusCode {
-        self.http_status()
-    }
-}
-
 impl From<std::io::Error> for Error {
     fn from(err: std::io::Error) -> Error {
         Error::Internal(err.to_string())
     }
 }
 
-impl From<meilisearch_core::Error> for ResponseError {
-    fn from(err: meilisearch_core::Error) -> ResponseError {
-        ResponseError { inner: Box::new(err) }
-    }
-}
-
-impl From<meilisearch_schema::Error> for ResponseError {
-    fn from(err: meilisearch_schema::Error) -> ResponseError {
-        ResponseError { inner: Box::new(err) }
-    }
-}
-
 impl From<actix_http::Error> for Error {
     fn from(err: actix_http::Error) -> Error {
         Error::Internal(err.to_string())
@@ -271,12 +289,6 @@ impl From<serde_json::error::Error> for Error {
     }
 }
 
-impl From<FacetCountError> for ResponseError {
-    fn from(err: FacetCountError) -> ResponseError {
-        ResponseError { inner: Box::new(err) }
-    }
-}
-
 impl From<JsonPayloadError> for Error {
     fn from(err: JsonPayloadError) -> Error {
         match err {
@@ -293,12 +293,18 @@ impl<'a> SearchBuilder<'a> {
     }
 }
 
-#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Serialize, Deserialize)]
+#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
 pub struct MatchPosition {
     pub start: usize,
     pub length: usize,
 }
 
+impl PartialOrd for MatchPosition {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
 impl Ord for MatchPosition {
     fn cmp(&self, other: &Self) -> Ordering {
         match self.start.cmp(&other.start) {
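This hunk is plausibly part of the "Fix bucket-sort unwrap bug (#915)" changelog entry: deriving `PartialOrd` while hand-writing `Ord` yields two orderings that can disagree (the derived `PartialOrd` compares fields in declaration order, while the manual `Ord` may not), which breaks the consistency contract that sorting relies on. Defining `partial_cmp` as `Some(self.cmp(other))` makes the two agree by construction. A compact illustration of the consistent pattern (the tie-break rule here is invented for the example):

```rust
use std::cmp::Ordering;

#[derive(Debug, Clone, Eq, PartialEq)]
struct MatchPosition {
    start: usize,
    length: usize,
}

// Single source of truth for ordering: by start, longest-first on ties.
impl Ord for MatchPosition {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.start.cmp(&other.start) {
            Ordering::Equal => other.length.cmp(&self.length),
            ord => ord,
        }
    }
}

// PartialOrd defined in terms of Ord can never contradict it.
impl PartialOrd for MatchPosition {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut v = vec![
        MatchPosition { start: 3, length: 1 },
        MatchPosition { start: 0, length: 2 },
        MatchPosition { start: 0, length: 5 },
    ];
    v.sort(); // safe: sort sees one consistent ordering
    assert_eq!(v[0], MatchPosition { start: 0, length: 5 });
}
```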
@@ -52,7 +52,7 @@ async fn main() -> Result<(), MainError> {
         _ => unreachable!(),
     }
 
-    if let Some(path) = &opt.load_from_snapshot {
+    if let Some(path) = &opt.import_snapshot {
         snapshot::load_snapshot(&opt.db_path, path, opt.ignore_snapshot_if_db_exists, opt.ignore_missing_snapshot)?;
     }
 
@@ -74,8 +74,8 @@ async fn main() -> Result<(), MainError> {
         dump::import_dump(&data, path, opt.dump_batch_size)?;
     }
 
-    if let Some(path) = &opt.snapshot_path {
-        snapshot::schedule_snapshot(data.clone(), &path, opt.snapshot_interval_sec.unwrap_or(86400))?;
+    if opt.schedule_snapshot {
+        snapshot::schedule_snapshot(data.clone(), &opt.snapshot_dir, opt.snapshot_interval_sec.unwrap_or(86400))?;
     }
 
     print_launch_resume(&opt, &data);
@@ -97,31 +97,35 @@ pub struct Opt {
     /// Defines the path of the snapshot file to import.
     /// This option will, by default, stop the process if a database already exist or if no snapshot exists at
     /// the given path. If this option is not specified no snapshot is imported.
-    #[structopt(long, env = "MEILI_LOAD_FROM_SNAPSHOT")]
-    pub load_from_snapshot: Option<PathBuf>,
+    #[structopt(long)]
+    pub import_snapshot: Option<PathBuf>,
 
     /// The engine will ignore a missing snapshot and not return an error in such case.
-    #[structopt(long, requires = "load-from-snapshot", env = "MEILI_IGNORE_MISSING_SNAPSHOT")]
+    #[structopt(long, requires = "import-snapshot")]
     pub ignore_missing_snapshot: bool,
 
     /// The engine will skip snapshot importation and not return an error in such case.
-    #[structopt(long, requires = "load-from-snapshot", env = "MEILI_IGNORE_SNAPSHOT_IF_DB_EXISTS")]
+    #[structopt(long, requires = "import-snapshot")]
     pub ignore_snapshot_if_db_exists: bool,
 
     /// Defines the directory path where meilisearch will create snapshot each snapshot_time_gap.
-    #[structopt(long, env = "MEILI_SNAPSHOT_PATH")]
-    pub snapshot_path: Option<PathBuf>,
+    #[structopt(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
+    pub snapshot_dir: PathBuf,
+
+    /// Activate snapshot scheduling.
+    #[structopt(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
+    pub schedule_snapshot: bool,
 
     /// Defines time interval, in seconds, between each snapshot creation.
-    #[structopt(long, requires = "snapshot-path", env = "MEILI_SNAPSHOT_INTERVAL_SEC")]
+    #[structopt(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC")]
     pub snapshot_interval_sec: Option<u64>,
 
     /// Folder where dumps are created when the dump route is called.
-    #[structopt(long, env = "MEILI_DUMPS_FOLDER", default_value = "dumps/")]
-    pub dumps_folder: PathBuf,
+    #[structopt(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
+    pub dumps_dir: PathBuf,
 
     /// Import a dump from the specified path, must be a `.tar.gz` file.
-    #[structopt(long, env = "MEILI_IMPORT_DUMP", conflicts_with = "load-from-snapshot")]
+    #[structopt(long, conflicts_with = "import-snapshot")]
     pub import_dump: Option<PathBuf>,
 
     /// The batch size used in the importation process, the bigger it is the faster the dump is created.
@@ -45,7 +45,8 @@ async fn get_document(
 
     let reader = data.db.main_read_txn()?;
 
-    let internal_id = index.main
+    let internal_id = index
+        .main
         .external_to_internal_docid(&reader, &path.document_id)?
         .ok_or(Error::document_not_found(&path.document_id))?;
 
@@ -166,47 +167,41 @@ async fn update_multiple_documents(
     body: web::Json<Vec<Document>>,
     is_partial: bool,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let reader = data.db.main_read_txn()?;
-
-    let mut schema = index
-        .main
-        .schema(&reader)?
-        .ok_or(meilisearch_core::Error::SchemaMissing)?;
-
-    if schema.primary_key().is_none() {
-        let id = match &params.primary_key {
-            Some(id) => id.to_string(),
-            None => body
-                .first()
-                .and_then(find_primary_key)
-                .ok_or(meilisearch_core::Error::MissingPrimaryKey)?
-        };
-
-        schema
-            .set_primary_key(&id)
-            .map_err(Error::bad_request)?;
-
-        data.db.main_write(|w| index.main.put_schema(w, &schema))?;
-    }
-
-    let mut document_addition = if is_partial {
-        index.documents_partial_addition()
-    } else {
-        index.documents_addition()
-    };
-
-    for document in body.into_inner() {
-        document_addition.update_document(document);
-    }
-
-    let update_id = data.db.update_write(|w| document_addition.finalize(w))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        let reader = data.db.main_read_txn()?;
+
+        let mut schema = index
+            .main
+            .schema(&reader)?
+            .ok_or(meilisearch_core::Error::SchemaMissing)?;
+
+        if schema.primary_key().is_none() {
+            let id = match &params.primary_key {
+                Some(id) => id.to_string(),
+                None => body
+                    .first()
+                    .and_then(find_primary_key)
+                    .ok_or(meilisearch_core::Error::MissingPrimaryKey)?,
+            };
+
+            schema.set_primary_key(&id).map_err(Error::bad_request)?;
+
+            data.db.main_write(|w| index.main.put_schema(w, &schema))?;
+        }
+
+        let mut document_addition = if is_partial {
+            index.documents_partial_addition()
+        } else {
+            index.documents_addition()
+        };
+
+        for document in body.into_inner() {
+            document_addition.update_document(document);
+        }
+
+        Ok(data.db.update_write(|w| document_addition.finalize(w))?)
+    })?;
+    return Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)));
 }
 
@@ -243,7 +238,6 @@ async fn delete_documents(
         .open_index(&path.index_uid)
         .ok_or(Error::index_not_found(&path.index_uid))?;
 
-
     let mut documents_deletion = index.documents_deletion();
 
     for document_id in body.into_inner() {
@@ -5,7 +5,7 @@ use actix_web::{get, post};
 use actix_web::{HttpResponse, web};
 use serde::{Deserialize, Serialize};
 
-use crate::dump::{DumpInfo, DumpStatus, compressed_dumps_folder, init_dump_process};
+use crate::dump::{DumpInfo, DumpStatus, compressed_dumps_dir, init_dump_process};
 use crate::Data;
 use crate::error::{Error, ResponseError};
 use crate::helpers::Authentication;
@@ -19,8 +19,8 @@ pub fn services(cfg: &mut web::ServiceConfig) {
 async fn trigger_dump(
     data: web::Data<Data>,
 ) -> Result<HttpResponse, ResponseError> {
-    let dumps_folder = Path::new(&data.dumps_folder);
-    match init_dump_process(&data, &dumps_folder) {
+    let dumps_dir = Path::new(&data.dumps_dir);
+    match init_dump_process(&data, &dumps_dir) {
         Ok(resume) => Ok(HttpResponse::Accepted().json(resume)),
         Err(e) => Err(e.into())
     }
@@ -42,7 +42,7 @@ async fn get_dump_status(
     data: web::Data<Data>,
     path: web::Path<DumpParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let dumps_folder = Path::new(&data.dumps_folder);
+    let dumps_dir = Path::new(&data.dumps_dir);
     let dump_uid = &path.dump_uid;
 
     if let Some(resume) = DumpInfo::get_current() {
@@ -51,7 +51,7 @@ async fn get_dump_status(
         }
     }
 
-    if File::open(compressed_dumps_folder(Path::new(dumps_folder), dump_uid)).is_ok() {
+    if File::open(compressed_dumps_dir(Path::new(dumps_dir), dump_uid)).is_ok() {
         let resume = DumpInfo::new(
             dump_uid.into(),
             DumpStatus::Done
@@ -1,47 +1,13 @@
+use actix_web::get;
 use actix_web::{web, HttpResponse};
-use actix_web::{get, put};
-use serde::Deserialize;
 
-use crate::error::{Error, ResponseError};
-use crate::helpers::Authentication;
-use crate::Data;
+use crate::error::ResponseError;
 
 pub fn services(cfg: &mut web::ServiceConfig) {
-    cfg.service(get_health).service(change_healthyness);
+    cfg.service(get_health);
 }
 
 #[get("/health")]
-async fn get_health(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
-    let reader = data.db.main_read_txn()?;
-    if let Ok(Some(_)) = data.db.get_health(&reader) {
-        return Err(Error::Maintenance.into());
-    }
-    Ok(HttpResponse::Ok().finish())
-}
-
-async fn set_healthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
-    data.db.main_write(|w| data.db.set_healthy(w))?;
-    Ok(HttpResponse::Ok().finish())
-}
-
-async fn set_unhealthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
-    data.db.main_write(|w| data.db.set_unhealthy(w))?;
-    Ok(HttpResponse::Ok().finish())
-}
-
-#[derive(Deserialize, Clone)]
-struct HealthBody {
-    health: bool,
-}
-
-#[put("/health", wrap = "Authentication::Private")]
-async fn change_healthyness(
-    data: web::Data<Data>,
-    body: web::Json<HealthBody>,
-) -> Result<HttpResponse, ResponseError> {
-    if body.health {
-        set_healthy(data).await
-    } else {
-        set_unhealthy(data).await
-    }
+async fn get_health() -> Result<HttpResponse, ResponseError> {
+    Ok(HttpResponse::NoContent().finish())
 }
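After this rewrite, /health no longer consults database state at all: it is a pure liveness probe returning 204 No Content, with the writable PUT variant removed. A hedged sketch of how the new behaviour could be exercised with actix-web 3's test utilities (the handler body mirrors the diff; the test scaffolding is an assumption):

```rust
#[cfg(test)]
mod tests {
    use actix_web::{get, http::StatusCode, test, App, HttpResponse};

    // Mirrors the simplified handler from the diff: no state, no auth wrapper.
    #[get("/health")]
    async fn get_health() -> HttpResponse {
        HttpResponse::NoContent().finish()
    }

    #[actix_rt::test]
    async fn health_returns_204() {
        let mut app = test::init_service(App::new().service(get_health)).await;
        let req = test::TestRequest::get().uri("/health").to_request();
        let resp = test::call_service(&mut app, req).await;
        assert_eq!(resp.status(), StatusCode::NO_CONTENT);
    }
}
```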
@@ -1,4 +1,4 @@
-use std::collections::{BTreeMap, BTreeSet, HashSet};
+use std::collections::{BTreeMap, BTreeSet};
 
 use actix_web::{delete, get, post};
 use actix_web::{web, HttpResponse};
@@ -53,13 +53,12 @@ async fn update_all(
     path: web::Path<IndexParam>,
     body: web::Json<Settings>,
 ) -> Result<HttpResponse, ResponseError> {
-    let settings = body
-        .into_inner()
-        .to_update()
-        .map_err(Error::bad_request)?;
-
-    let update_id = data.db.update_write::<_, _, Error>(|writer| {
-        update_all_settings_txn(&data, settings, &path.index_uid, writer)
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        Ok(data.db.update_write::<_, _, ResponseError>(|writer| {
+            let settings = body.into_inner().to_update().map_err(Error::bad_request)?;
+            let update_id = index.settings_update(writer, settings)?;
+            Ok(update_id)
+        })?)
     })?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -71,11 +70,7 @@ pub fn get_all_sync(data: &web::Data<Data>, reader: &MainReader, index_uid: &str
         .open_index(index_uid)
         .ok_or(Error::index_not_found(index_uid))?;
 
-    let stop_words: BTreeSet<String> = index
-        .main
-        .stop_words(reader)?
-        .into_iter()
-        .collect();
+    let stop_words: BTreeSet<String> = index.main.stop_words(&reader)?.into_iter().collect();
 
     let synonyms_list = index.main.synonyms(reader)?;
 
@@ -94,22 +89,19 @@ pub fn get_all_sync(data: &web::Data<Data>, reader: &MainReader, index_uid: &str
         .map(|r| r.to_string())
         .collect();
 
-
-    let schema = index.main.schema(reader)?;
+    let schema = index.main.schema(&reader)?;
 
     let distinct_attribute = match (index.main.distinct_attribute(reader)?, &schema) {
         (Some(id), Some(schema)) => schema.name(id).map(str::to_string),
         _ => None,
     };
 
-    let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(reader)?) {
-        (Some(schema), Some(attrs)) => {
-            attrs
-                .iter()
-                .filter_map(|&id| schema.name(id))
-                .map(str::to_string)
-                .collect()
-        }
+    let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(&reader)?) {
+        (Some(schema), Some(attrs)) => attrs
+            .iter()
+            .filter_map(|&id| schema.name(id))
+            .map(str::to_string)
+            .collect(),
         _ => vec![],
     };
 
@@ -159,7 +151,9 @@ async fn delete_all(
         attributes_for_faceting: UpdateState::Clear,
     };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data
+        .db
+        .update_write(|w| index.settings_update(w, settings))?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -198,18 +192,17 @@ async fn update_rules(
     path: web::Path<IndexParam>,
     body: web::Json<Option<Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = Settings {
-        ranking_rules: Some(body.into_inner()),
-        ..Settings::default()
-    };
-
-    let settings = settings.to_update().map_err(Error::bad_request)?;
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        let settings = Settings {
+            ranking_rules: Some(body.into_inner()),
+            ..Settings::default()
+        };
+
+        let settings = settings.to_update().map_err(Error::bad_request)?;
+        Ok(data
+            .db
+            .update_write(|w| index.settings_update(w, settings))?)
+    })?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -232,7 +225,9 @@ async fn delete_rules(
         ..SettingsUpdate::default()
     };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data
+        .db
+        .update_write(|w| index.settings_update(w, settings))?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -269,18 +264,17 @@ async fn update_distinct(
     path: web::Path<IndexParam>,
     body: web::Json<Option<String>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = Settings {
-        distinct_attribute: Some(body.into_inner()),
-        ..Settings::default()
-    };
-
-    let settings = settings.to_update().map_err(Error::bad_request)?;
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        let settings = Settings {
+            distinct_attribute: Some(body.into_inner()),
+            ..Settings::default()
+        };
+
+        let settings = settings.to_update().map_err(Error::bad_request)?;
+        Ok(data
+            .db
+            .update_write(|w| index.settings_update(w, settings))?)
+    })?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -303,7 +297,9 @@ async fn delete_distinct(
         ..SettingsUpdate::default()
     };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data
+        .db
+        .update_write(|w| index.settings_update(w, settings))?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -322,8 +318,7 @@ async fn get_searchable(
         .ok_or(Error::index_not_found(&path.index_uid))?;
     let reader = data.db.main_read_txn()?;
     let schema = index.main.schema(&reader)?;
-    let searchable_attributes: Option<Vec<String>> =
-        schema.as_ref().map(get_indexed_attributes);
+    let searchable_attributes: Option<Vec<String>> = schema.as_ref().map(get_indexed_attributes);
 
     Ok(HttpResponse::Ok().json(searchable_attributes))
 }
@@ -337,19 +332,18 @@ async fn update_searchable(
     path: web::Path<IndexParam>,
     body: web::Json<Option<Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = Settings {
-        searchable_attributes: Some(body.into_inner()),
-        ..Settings::default()
-    };
-
-    let settings = settings.to_update().map_err(Error::bad_request)?;
-
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        let settings = Settings {
+            searchable_attributes: Some(body.into_inner()),
+            ..Settings::default()
+        };
+
+        let settings = settings.to_update().map_err(Error::bad_request)?;
+
+        Ok(data
+            .db
+            .update_write(|w| index.settings_update(w, settings))?)
+    })?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -372,7 +366,9 @@ async fn delete_searchable(
         ..SettingsUpdate::default()
     };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data
+        .db
+        .update_write(|w| index.settings_update(w, settings))?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@ -405,20 +401,19 @@ async fn get_displayed(
|
||||
async fn update_displayed(
|
||||
data: web::Data<Data>,
|
||||
path: web::Path<IndexParam>,
|
||||
body: web::Json<Option<HashSet<String>>>,
|
||||
body: web::Json<Option<BTreeSet<String>>>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
let update_id = data.get_or_create_index(&path.index_uid, |index| {
|
||||
let settings = Settings {
|
||||
displayed_attributes: Some(body.into_inner()),
|
||||
..Settings::default()
|
||||
};
|
||||
|
||||
let settings = Settings {
|
||||
displayed_attributes: Some(body.into_inner()),
|
||||
..Settings::default()
|
||||
};
|
||||
|
||||
let settings = settings.to_update().map_err(Error::bad_request)?;
|
||||
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
|
||||
let settings = settings.to_update().map_err(Error::bad_request)?;
|
||||
Ok(data
|
||||
.db
|
||||
.update_write(|w| index.settings_update(w, settings))?)
|
||||
})?;
|
||||
|
||||
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
|
||||
}
|
||||
@ -441,7 +436,9 @@ async fn delete_displayed(
|
||||
..SettingsUpdate::default()
|
||||
};
|
||||
|
||||
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
|
||||
let update_id = data
|
||||
.db
|
||||
.update_write(|w| index.settings_update(w, settings))?;
|
||||
|
||||
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
|
||||
}
|
||||
@ -459,20 +456,16 @@ async fn get_attributes_for_faceting(
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
|
||||
let attributes_for_faceting = data
|
||||
.db
|
||||
.main_read::<_, _, ResponseError>(|reader| {
|
||||
let attributes_for_faceting = data.db.main_read::<_, _, ResponseError>(|reader| {
|
||||
let schema = index.main.schema(reader)?;
|
||||
let attrs = index.main.attributes_for_faceting(reader)?;
|
||||
let attr_names = match (&schema, &attrs) {
|
||||
(Some(schema), Some(attrs)) => {
|
||||
attrs
|
||||
.iter()
|
||||
.filter_map(|&id| schema.name(id))
|
||||
.map(str::to_string)
|
||||
.collect()
|
||||
}
|
||||
_ => vec![]
|
||||
(Some(schema), Some(attrs)) => attrs
|
||||
.iter()
|
||||
.filter_map(|&id| schema.name(id))
|
||||
.map(str::to_string)
|
||||
.collect(),
|
||||
_ => vec![],
|
||||
};
|
||||
Ok(attr_names)
|
||||
})?;
|
||||
@ -489,18 +482,17 @@ async fn update_attributes_for_faceting(
|
||||
path: web::Path<IndexParam>,
|
||||
body: web::Json<Option<Vec<String>>>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
let update_id = data.get_or_create_index(&path.index_uid, |index| {
|
||||
let settings = Settings {
|
||||
attributes_for_faceting: Some(body.into_inner()),
|
||||
..Settings::default()
|
||||
};
|
||||
|
||||
let settings = Settings {
|
||||
attributes_for_faceting: Some(body.into_inner()),
|
||||
..Settings::default()
|
||||
};
|
||||
|
||||
let settings = settings.to_update().map_err(Error::bad_request)?;
|
||||
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
|
||||
let settings = settings.to_update().map_err(Error::bad_request)?;
|
||||
Ok(data
|
||||
.db
|
||||
.update_write(|w| index.settings_update(w, settings))?)
|
||||
})?;
|
||||
|
||||
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
|
||||
}
|
||||
@ -523,7 +515,9 @@ async fn delete_attributes_for_faceting(
|
||||
..SettingsUpdate::default()
|
||||
};
|
||||
|
||||
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
|
||||
let update_id = data
|
||||
.db
|
||||
.update_write(|w| index.settings_update(w, settings))?;
|
||||
|
||||
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
|
||||
}
|
||||
@ -532,18 +526,20 @@ fn get_indexed_attributes(schema: &Schema) -> Vec<String> {
|
||||
if schema.is_indexed_all() {
|
||||
["*"].iter().map(|s| s.to_string()).collect()
|
||||
} else {
|
||||
schema.indexed_name()
|
||||
schema
|
||||
.indexed_name()
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_displayed_attributes(schema: &Schema) -> HashSet<String> {
|
||||
fn get_displayed_attributes(schema: &Schema) -> BTreeSet<String> {
|
||||
if schema.is_displayed_all() {
|
||||
["*"].iter().map(|s| s.to_string()).collect()
|
||||
} else {
|
||||
schema.displayed_name()
|
||||
schema
|
||||
.displayed_name()
|
||||
.iter()
|
||||
.map(|s| s.to_string())
|
||||
.collect()
|
||||
|
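The recurring edit in the settings routes above replaces the explicit `open_index` + `Error::index_not_found` lookup with `data.get_or_create_index(&path.index_uid, |index| { ... })`, so writing settings to a missing index now creates it lazily instead of answering 404. The helper's real definition is not shown in these hunks; the following self-contained sketch only illustrates the shape the call sites assume, and every identifier in it is an assumption:

```rust
use std::collections::HashMap;

// Stand-ins for the real `Db`/`Index` types, which live elsewhere in the crate.
#[derive(Default)]
struct Index;

#[derive(Default)]
struct Db {
    indexes: HashMap<String, Index>,
}

impl Db {
    // Open the index if it exists, create it otherwise, then run the
    // caller's closure against it; the closure's Result is passed through.
    fn get_or_create_index<R>(
        &mut self,
        uid: &str,
        f: impl FnOnce(&Index) -> Result<R, String>,
    ) -> Result<R, String> {
        let index = self.indexes.entry(uid.to_string()).or_default();
        f(index)
    }
}

fn main() {
    let mut db = Db::default();
    // The closure runs whether or not "movies" existed beforehand.
    let update_id = db.get_or_create_index("movies", |_index| Ok(42u64));
    assert_eq!(update_id, Ok(42));
}
```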
@@ -1,4 +1,4 @@
-use std::collections::HashMap;
+use std::collections::{HashMap, BTreeMap};
 
 use actix_web::web;
 use actix_web::HttpResponse;
@@ -24,7 +24,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
 struct IndexStatsResponse {
     number_of_documents: u64,
     is_indexing: bool,
-    fields_distribution: HashMap<String, usize>,
+    fields_distribution: BTreeMap<String, usize>,
 }
 
 #[get("/indexes/{index_uid}/stats", wrap = "Authentication::Private")]
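The `HashMap` to `BTreeMap` switch is what makes `fieldsDistribution` come back with sorted keys: serde serializes a `BTreeMap` in key order, so no extra sorting code is needed. A quick self-contained check:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut fields_distribution = BTreeMap::new();
    fields_distribution.insert("phone", 77);
    fields_distribution.insert("age", 77);
    fields_distribution.insert("name", 77);

    // BTreeMap iterates in key order, so the emitted JSON keys are sorted.
    let json = serde_json::to_string(&fields_distribution).unwrap();
    assert_eq!(json, r#"{"age":77,"name":77,"phone":77}"#);
}
```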
@@ -39,17 +39,16 @@ async fn update(
     path: web::Path<IndexParam>,
     body: web::Json<BTreeSet<String>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let settings = SettingsUpdate {
-        stop_words: UpdateState::Update(body.into_inner()),
-        ..SettingsUpdate::default()
-    };
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        let settings = SettingsUpdate {
+            stop_words: UpdateState::Update(body.into_inner()),
+            ..SettingsUpdate::default()
+        };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+        Ok(data
+            .db
+            .update_write(|w| index.settings_update(w, settings))?)
+    })?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -72,7 +71,9 @@ async fn delete(
         ..SettingsUpdate::default()
     };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data
+        .db
+        .update_write(|w| index.settings_update(w, settings))?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -50,17 +50,16 @@ async fn update(
     path: web::Path<IndexParam>,
     body: web::Json<BTreeMap<String, Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let settings = SettingsUpdate {
-        synonyms: UpdateState::Update(body.into_inner()),
-        ..SettingsUpdate::default()
-    };
+    let update_id = data.get_or_create_index(&path.index_uid, |index| {
+        let settings = SettingsUpdate {
+            synonyms: UpdateState::Update(body.into_inner()),
+            ..SettingsUpdate::default()
+        };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+        Ok(data
+            .db
+            .update_write(|w| index.settings_update(w, settings))?)
+    })?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -83,7 +82,9 @@ async fn delete(
         ..SettingsUpdate::default()
     };
 
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
+    let update_id = data
+        .db
+        .update_write(|w| index.settings_update(w, settings))?;
 
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
@@ -20,9 +20,9 @@ pub fn load_snapshot(
     if !db_path.exists() && snapshot_path.exists() {
         compression::from_tar_gz(snapshot_path, db_path)
     } else if db_path.exists() && !ignore_snapshot_if_db_exists {
-        Err(Error::Internal(format!("database already exists at {:?}", db_path)))
+        Err(Error::Internal(format!("database already exists at {:?}, try to delete it or rename it", db_path.canonicalize().unwrap_or(db_path.into()))))
     } else if !snapshot_path.exists() && !ignore_missing_snapshot {
-        Err(Error::Internal(format!("snapshot doesn't exist at {:?}", snapshot_path)))
+        Err(Error::Internal(format!("snapshot doesn't exist at {:?}", snapshot_path.canonicalize().unwrap_or(snapshot_path.into()))))
     } else {
         Ok(())
     }
@@ -33,7 +33,7 @@ pub fn create_snapshot(data: &Data, snapshot_path: &Path) -> Result<(), Error> {
 
     data.db.copy_and_compact_to_path(tmp_dir.path())?;
 
-    compression::to_tar_gz(tmp_dir.path(), snapshot_path).or_else(|e| Err(Error::Internal(format!("something went wrong during snapshot compression: {}", e))))
+    compression::to_tar_gz(tmp_dir.path(), snapshot_path).map_err(|e| Error::Internal(format!("something went wrong during snapshot compression: {}", e)))
 }
 
 pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
@@ -42,13 +42,13 @@ pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
     }
     let db_name = Path::new(&data.db_path).file_name().ok_or_else(|| Error::Internal("invalid database name".to_string()))?;
     create_dir_all(snapshot_dir)?;
-    let snapshot_path = snapshot_dir.join(format!("{}.tar.gz", db_name.to_str().unwrap_or("data.ms")));
+    let snapshot_path = snapshot_dir.join(format!("{}.snapshot", db_name.to_str().unwrap_or("data.ms")));
 
     thread::spawn(move || loop {
-        thread::sleep(Duration::from_secs(time_gap_s));
         if let Err(e) = create_snapshot(&data, &snapshot_path) {
             error!("Unsuccessful snapshot creation: {}", e);
         }
+        thread::sleep(Duration::from_secs(time_gap_s));
     });
 
     Ok(())
@@ -67,13 +67,13 @@ mod tests {
     let test_dir = tempdir.path();
     let src_dir = test_dir.join("src");
     let dest_dir = test_dir.join("complex/destination/path/");
-    let archive_path = test_dir.join("archive.tar.gz");
+    let archive_path = test_dir.join("archive.snapshot");
 
     let file_1_relative = Path::new("file1.txt");
-    let subfolder_relative = Path::new("subfolder/");
-    let file_2_relative = Path::new("subfolder/file2.txt");
+    let subdir_relative = Path::new("subdir/");
+    let file_2_relative = Path::new("subdir/file2.txt");
 
-    create_dir_all(src_dir.join(subfolder_relative)).unwrap();
+    create_dir_all(src_dir.join(subdir_relative)).unwrap();
     fs::File::create(src_dir.join(file_1_relative)).unwrap().write_all(b"Hello_file_1").unwrap();
     fs::File::create(src_dir.join(file_2_relative)).unwrap().write_all(b"Hello_file_2").unwrap();
 
@@ -84,7 +84,7 @@ mod tests {
 
     assert!(dest_dir.exists());
     assert!(dest_dir.join(file_1_relative).exists());
-    assert!(dest_dir.join(subfolder_relative).exists());
+    assert!(dest_dir.join(subdir_relative).exists());
     assert!(dest_dir.join(file_2_relative).exists());
 
     let contents = fs::read_to_string(dest_dir.join(file_1_relative)).unwrap();
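The `or_else(|e| Err(...))` to `map_err(...)` rewrite in `create_snapshot` is behavior-preserving: both transform only the error variant. A minimal equivalence check, with `compress` as a hypothetical stand-in for the compression call:

```rust
fn compress() -> Result<(), String> {
    // Hypothetical stand-in for compression::to_tar_gz failing.
    Err("No such file or directory".to_string())
}

fn main() {
    // Old spelling: re-wrap the error inside or_else.
    let old = compress().or_else(|e| Err(format!("snapshot compression: {}", e)));
    // New spelling: map_err touches only the Err variant, with less noise.
    let new = compress().map_err(|e| format!("snapshot compression: {}", e));
    assert_eq!(old, new);
}
```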
@@ -15,15 +15,24 @@ use meilisearch_http::option::Opt;
 #[macro_export]
 macro_rules! test_post_get_search {
     ($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
-        let post_query: meilisearch_http::routes::search::SearchQueryPost = serde_json::from_str(&$query.clone().to_string()).unwrap();
+        let post_query: meilisearch_http::routes::search::SearchQueryPost =
+            serde_json::from_str(&$query.clone().to_string()).unwrap();
         let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
         let get_query = ::serde_url_params::to_string(&get_query).unwrap();
         let ($response, $status_code) = $server.search_get(&get_query).await;
-        let _ =::std::panic::catch_unwind(|| $block)
-            .map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
+        let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
+            panic!(
+                "panic in get route: {:?}",
+                e.downcast_ref::<&str>().unwrap()
+            )
+        });
         let ($response, $status_code) = $server.search_post($query).await;
-        let _ = ::std::panic::catch_unwind(|| $block)
-            .map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
+        let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
+            panic!(
+                "panic in post route: {:?}",
+                e.downcast_ref::<&str>().unwrap()
+            )
+        });
     };
 }
 
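Formatting aside, the macro's one trick is running the same assertion block against both the GET and the POST response, using `catch_unwind` to relabel a failing assertion with the route it came from. A distilled, self-contained version of that pattern, with names invented for the sketch:

```rust
use std::panic;

// Run shared assertions, tagging any panic with the route under test.
fn run_tagged(route: &str, assertions: impl Fn() + panic::UnwindSafe) {
    if let Err(e) = panic::catch_unwind(assertions) {
        // assert! and assert_eq! panic with a String payload.
        let msg = e
            .downcast_ref::<String>()
            .cloned()
            .unwrap_or_else(|| "unknown panic".to_string());
        panic!("panic in {} route: {}", route, msg);
    }
}

fn main() {
    let status_code = 200;
    run_tagged("get", || assert_eq!(status_code, 200));
    run_tagged("post", || assert_eq!(status_code, 200));
}
```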
@@ -40,7 +49,7 @@ impl Server {
 
         let opt = Opt {
             db_path: tmp_dir.path().join("db").to_str().unwrap().to_string(),
-            dumps_folder: tmp_dir.path().join("dump"),
+            dumps_dir: tmp_dir.path().join("dump"),
             dump_batch_size: 16,
             http_addr: "127.0.0.1:7700".to_owned(),
             master_key: None,
@@ -61,7 +70,6 @@ impl Server {
     }
 
     pub async fn test_server() -> Self {
-
         let mut server = Self::with_uid("test");
 
         let body = json!({
@@ -151,7 +159,8 @@ impl Server {
     pub async fn get_request(&mut self, url: &str) -> (Value, StatusCode) {
         eprintln!("get_request: {}", url);
 
-        let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
+        let mut app =
+            test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
 
         let req = test::TestRequest::get().uri(url).to_request();
         let res = test::call_service(&mut app, req).await;
@@ -165,7 +174,8 @@ impl Server {
     pub async fn post_request(&self, url: &str, body: Value) -> (Value, StatusCode) {
         eprintln!("post_request: {}", url);
 
-        let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
+        let mut app =
+            test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
 
         let req = test::TestRequest::post()
             .uri(url)
@@ -183,8 +193,7 @@ impl Server {
         eprintln!("post_request_async: {}", url);
 
         let (response, status_code) = self.post_request(url, body).await;
-        // eprintln!("response: {}", response);
-        assert_eq!(status_code, 202);
+        eprintln!("response: {}", response);
         assert!(response["updateId"].as_u64().is_some());
         self.wait_update_id(response["updateId"].as_u64().unwrap())
             .await;
@@ -194,7 +203,8 @@ impl Server {
     pub async fn put_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
         eprintln!("put_request: {}", url);
 
-        let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
+        let mut app =
+            test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
 
         let req = test::TestRequest::put()
             .uri(url)
@@ -222,7 +232,8 @@ impl Server {
     pub async fn delete_request(&mut self, url: &str) -> (Value, StatusCode) {
         eprintln!("delete_request: {}", url);
 
-        let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
+        let mut app =
+            test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
 
         let req = test::TestRequest::delete().uri(url).to_request();
         let res = test::call_service(&mut app, req).await;
@@ -340,9 +351,9 @@ impl Server {
         self.delete_request_async(&url).await
     }
 
-    pub async fn delete_multiple_documents(&mut self, body: Value) {
+    pub async fn delete_multiple_documents(&mut self, body: Value) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/documents/delete-batch", self.uid);
-        self.post_request_async(&url, body).await;
+        self.post_request_async(&url, body).await
     }
 
     pub async fn get_all_settings(&mut self) -> (Value, StatusCode) {
@@ -355,6 +366,11 @@ impl Server {
         self.post_request_async(&url, body).await;
     }
 
+    pub async fn update_all_settings_sync(&mut self, body: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings", self.uid);
+        self.post_request(&url, body).await
+    }
+
     pub async fn delete_all_settings(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings", self.uid);
         self.delete_request_async(&url).await
@@ -390,6 +406,11 @@ impl Server {
         self.post_request_async(&url, body).await;
     }
 
+    pub async fn update_distinct_attribute_sync(&mut self, body: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
+        self.post_request(&url, body).await
+    }
+
     pub async fn delete_distinct_attribute(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
         self.delete_request_async(&url).await
@@ -410,6 +431,11 @@ impl Server {
         self.post_request_async(&url, body).await;
     }
 
+    pub async fn update_searchable_attributes_sync(&mut self, body: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
+        self.post_request(&url, body).await
+    }
+
     pub async fn delete_searchable_attributes(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
         self.delete_request_async(&url).await
@@ -425,11 +451,39 @@ impl Server {
         self.post_request_async(&url, body).await;
     }
 
+    pub async fn update_displayed_attributes_sync(&mut self, body: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
+        self.post_request(&url, body).await
+    }
+
     pub async fn delete_displayed_attributes(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
         self.delete_request_async(&url).await
     }
 
+    pub async fn get_attributes_for_faceting(&mut self) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
+        self.get_request(&url).await
+    }
+
+    pub async fn update_attributes_for_faceting(&mut self, body: Value) {
+        let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
+        self.post_request_async(&url, body).await;
+    }
+
+    pub async fn update_attributes_for_faceting_sync(
+        &mut self,
+        body: Value,
+    ) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
+        self.post_request(&url, body).await
+    }
+
+    pub async fn delete_attributes_for_faceting(&mut self) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
+        self.delete_request_async(&url).await
+    }
+
     pub async fn get_synonyms(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/synonyms", self.uid);
         self.get_request(&url).await
@@ -440,6 +494,11 @@ impl Server {
         self.post_request_async(&url, body).await;
     }
 
+    pub async fn update_synonyms_sync(&mut self, body: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/synonyms", self.uid);
+        self.post_request(&url, body).await
+    }
+
     pub async fn delete_synonyms(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/synonyms", self.uid);
         self.delete_request_async(&url).await
@@ -455,6 +514,11 @@ impl Server {
         self.post_request_async(&url, body).await;
     }
 
+    pub async fn update_stop_words_sync(&mut self, body: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/stop-words", self.uid);
+        self.post_request(&url, body).await
+    }
+
     pub async fn delete_stop_words(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/stop-words", self.uid);
         self.delete_request_async(&url).await
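All the new `*_sync` helpers exist for one reason: `post_request_async` asserts the 202 and then blocks on `wait_update_id`, which can never return for a request the server rejects, while `post_request` hands back the raw response so error paths can be asserted. A self-contained caricature of the split (the real `Server` lives in tests/common.rs; everything here is illustrative):

```rust
struct Server;

impl Server {
    // Raw variant: return whatever the route answered.
    fn post_request(&self, valid: bool) -> u16 {
        if valid { 202 } else { 400 }
    }

    // Fire-and-forget variant: insists on acceptance, then would wait for
    // the update to be processed; unusable for asserting error responses.
    fn post_request_async(&self, valid: bool) {
        let status_code = self.post_request(valid);
        assert_eq!(status_code, 202);
        // wait_update_id(...) would follow here in the real helper.
    }
}

fn main() {
    let server = Server;
    server.post_request_async(true); // happy path
    assert_eq!(server.post_request(false), 400); // error path needs the raw helper
}
```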
@@ -192,7 +192,9 @@ async fn add_document_with_long_field() {
         "url":"/configuration/app/web.html#locations"
     }]);
     server.add_or_replace_multiple_documents(body).await;
-    let (response, _status) = server.search_post(json!({ "q": "request_buffering" })).await;
+    let (response, _status) = server
+        .search_post(json!({ "q": "request_buffering" }))
+        .await;
     assert!(!response["hits"].as_array().unwrap().is_empty());
 }
 
@@ -213,5 +215,8 @@ async fn documents_with_same_id_are_overwritten() {
     server.add_or_replace_multiple_documents(documents).await;
     let (response, _status) = server.get_all_documents().await;
     assert_eq!(response.as_array().unwrap().len(), 1);
-    assert_eq!(response.as_array().unwrap()[0].as_object().unwrap()["content"], "test2");
+    assert_eq!(
+        response.as_array().unwrap()[0].as_object().unwrap()["content"],
+        "test2"
+    );
 }
@@ -101,7 +101,7 @@ async fn trigger_dump_concurently_should_return_conflict() {
 
 #[actix_rt::test]
 #[ignore]
-async fn get_dump_status_early_should_return_processing() {
+async fn get_dump_status_early_should_return_in_progress() {
     let mut server = common::Server::test_server().await;
 
 
@@ -116,7 +116,7 @@ async fn get_dump_status_early_should_return_processing() {
 
     let expected = json!({
         "uid": dump_uid,
-        "status": "processing"
+        "status": "in_progress"
     });
 
     assert_eq!(status_code, 200);
@@ -150,6 +150,39 @@ async fn get_dump_status_should_return_done() {
     assert_json_eq!(expected.clone(), value.clone(), ordered: false);
 }
 
+#[actix_rt::test]
+#[ignore]
+async fn get_dump_status_should_return_error_provoking_it() {
+    let mut server = common::Server::test_server().await;
+
+    let (value, status_code) = server.trigger_dump().await;
+
+    // removing destination directory provoking `No such file or directory` error
+    std::fs::remove_dir(server.data().dumps_dir.clone()).unwrap();
+
+    assert_eq!(status_code, 202);
+
+    let dump_uid = value["uid"].as_str().unwrap().to_string();
+
+    let expected = json!({
+        "uid": dump_uid.clone(),
+        "status": "failed",
+        "message": "Dump process failed: compressing dump; No such file or directory (os error 2)",
+        "errorCode": "dump_process_failed",
+        "errorType": "internal_error",
+        "errorLink": "https://docs.meilisearch.com/errors#dump_process_failed"
+    });
+
+    thread::sleep(Duration::from_secs(1)); // wait dump until process end
+
+    let (value, status_code) = server.get_dump_status(&dump_uid).await;
+
+    assert_eq!(status_code, 200);
+
+    assert_json_eq!(expected.clone(), value.clone(), ordered: false);
+}
+
 #[actix_rt::test]
 #[ignore]
 async fn dump_metadata_should_be_valid() {
@@ -164,11 +197,11 @@ async fn dump_metadata_should_be_valid() {
 
     let uid = trigger_and_wait_dump(&mut server).await;
 
-    let dumps_folder = Path::new(&server.data().dumps_folder);
+    let dumps_dir = Path::new(&server.data().dumps_dir);
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_folder.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("metadata.json")).unwrap();
     let mut metadata: serde_json::Value = serde_json::from_reader(file).unwrap();
@@ -205,9 +238,9 @@ async fn dump_gzip_should_have_been_created() {
 
 
     let dump_uid = trigger_and_wait_dump(&mut server).await;
-    let dumps_folder = Path::new(&server.data().dumps_folder);
+    let dumps_dir = Path::new(&server.data().dumps_dir);
 
-    let compressed_path = dumps_folder.join(format!("{}.tar.gz", dump_uid));
+    let compressed_path = dumps_dir.join(format!("{}.dump", dump_uid));
     assert!(File::open(compressed_path).is_ok());
 }
 
@@ -279,11 +312,11 @@ async fn dump_index_settings_should_be_valid() {
 
     let uid = trigger_and_wait_dump(&mut server).await;
 
-    let dumps_folder = Path::new(&server.data().dumps_folder);
+    let dumps_dir = Path::new(&server.data().dumps_dir);
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_folder.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("test").join("settings.json")).unwrap();
     let settings: serde_json::Value = serde_json::from_reader(file).unwrap();
@@ -303,11 +336,11 @@ async fn dump_index_documents_should_be_valid() {
 
     let uid = trigger_and_wait_dump(&mut server).await;
 
-    let dumps_folder = Path::new(&server.data().dumps_folder);
+    let dumps_dir = Path::new(&server.data().dumps_dir);
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_folder.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("test").join("documents.jsonl")).unwrap();
     let documents = read_all_jsonline(file);
@@ -327,11 +360,11 @@ async fn dump_index_updates_should_be_valid() {
 
     let uid = trigger_and_wait_dump(&mut server).await;
 
-    let dumps_folder = Path::new(&server.data().dumps_folder);
+    let dumps_dir = Path::new(&server.data().dumps_dir);
     let tmp_dir = TempDir::new().unwrap();
     let tmp_dir_path = tmp_dir.path();
 
-    compression::from_tar_gz(&dumps_folder.join(&format!("{}.tar.gz", uid)), tmp_dir_path).unwrap();
+    compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
 
     let file = File::open(tmp_dir_path.join("test").join("updates.jsonl")).unwrap();
     let mut updates = read_all_jsonline(file);
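The dump tests now pin a renamed lifecycle (`in_progress` instead of `processing`, plus `done` and `failed`) and a renamed artifact (`{uid}.dump` under `dumps_dir` instead of `{uid}.tar.gz` under `dumps_folder`). The status enum itself is outside this diff; a sketch of a shape that would produce exactly the strings asserted here:

```rust
use serde::Serialize;

// Assumed shape; the real enum lives in the dump module, outside these hunks.
#[derive(Serialize)]
#[serde(rename_all = "snake_case")]
enum DumpStatus {
    InProgress,
    Done,
    Failed,
}

fn main() {
    // serde's snake_case renaming yields the strings the tests expect.
    assert_eq!(serde_json::to_string(&DumpStatus::InProgress).unwrap(), r#""in_progress""#);
    assert_eq!(serde_json::to_string(&DumpStatus::Done).unwrap(), r#""done""#);
    assert_eq!(serde_json::to_string(&DumpStatus::Failed).unwrap(), r#""failed""#);
}
```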
@@ -1,6 +1,3 @@
-use serde_json::json;
-use std::convert::Into;
-
 mod common;
 
 #[actix_rt::test]
@@ -10,29 +7,5 @@ async fn test_healthyness() {
     // Check that the server is healthy
 
     let (_response, status_code) = server.get_health().await;
-    assert_eq!(status_code, 200);
-
-    // Set the serve Unhealthy
-    let body = json!({
-        "health": false,
-    });
-    let (_response, status_code) = server.update_health(body).await;
-    assert_eq!(status_code, 200);
-
-    // Check that the server is unhealthy
-
-    let (_response, status_code) = server.get_health().await;
-    assert_eq!(status_code, 503);
-
-    // Set the server healthy
-    let body = json!({
-        "health": true,
-    });
-    let (_response, status_code) = server.update_health(body).await;
-    assert_eq!(status_code, 200);
-
-    // Check if the server is healthy
-
-    let (_response, status_code) = server.get_health().await;
-    assert_eq!(status_code, 200);
+    assert_eq!(status_code, 204);
 }
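The slimmed-down health test also encodes a contract change: `GET /health` now answers 204 No Content when healthy instead of 200, and the writable health toggle is gone. A minimal actix-web handler honouring that contract might look like this (a sketch, not the project's actual route):

```rust
use actix_web::{get, App, HttpResponse, HttpServer};

// Healthy server: 204 No Content, no body.
#[get("/health")]
async fn health() -> HttpResponse {
    HttpResponse::NoContent().finish()
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(health))
        .bind("127.0.0.1:7700")?
        .run()
        .await
}
```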
@@ -777,3 +777,33 @@ async fn update_existing_primary_key_is_error() {
     assert_eq!(response["errorCode"], "primary_key_already_present");
     assert_eq!(response["errorType"], "invalid_request_error");
 }
+
+#[actix_rt::test]
+async fn test_facets_distribution_attribute() {
+    let mut server = common::Server::test_server().await;
+
+    let (response, _status_code) = server.get_index_stats().await;
+
+    let expected = json!({
+        "isIndexing": false,
+        "numberOfDocuments": 77,
+        "fieldsDistribution": {
+            "age": 77,
+            "gender": 77,
+            "phone": 77,
+            "name": 77,
+            "registered": 77,
+            "latitude": 77,
+            "email": 77,
+            "tags": 77,
+            "longitude": 77,
+            "color": 77,
+            "address": 77,
+            "balance": 77,
+            "about": 77,
+            "picture": 77,
+        },
+    });
+
+    assert_json_eq!(expected, response, ordered: true);
+}
meilisearch-http/tests/lazy_index_creation.rs (new file, 446 lines)
@@ -0,0 +1,446 @@
+use serde_json::json;
+
+mod common;
+
+#[actix_rt::test]
+async fn create_index_lazy_by_pushing_documents() {
+    let mut server = common::Server::with_uid("movies");
+
+    // 1 - Add documents
+
+    let body = json!([{
+        "title": "Test",
+        "comment": "comment test"
+    }]);
+
+    let url = "/indexes/movies/documents?primaryKey=title";
+    let (response, status_code) = server.post_request(&url, body).await;
+    assert_eq!(status_code, 202);
+    let update_id = response["updateId"].as_u64().unwrap();
+    server.wait_update_id(update_id).await;
+
+    // 3 - Check update success
+
+    let (response, status_code) = server.get_update_status(update_id).await;
+    assert_eq!(status_code, 200);
+    assert_eq!(response["status"], "processed");
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_pushing_documents_and_discover_pk() {
+    let mut server = common::Server::with_uid("movies");
+
+    // 1 - Add documents
+
+    let body = json!([{
+        "id": 1,
+        "title": "Test",
+        "comment": "comment test"
+    }]);
+
+    let url = "/indexes/movies/documents";
+    let (response, status_code) = server.post_request(&url, body).await;
+    assert_eq!(status_code, 202);
+    let update_id = response["updateId"].as_u64().unwrap();
+    server.wait_update_id(update_id).await;
+
+    // 3 - Check update success
+
+    let (response, status_code) = server.get_update_status(update_id).await;
+    assert_eq!(status_code, 200);
+    assert_eq!(response["status"], "processed");
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_pushing_documents_with_wrong_name() {
+    let server = common::Server::with_uid("wrong&name");
+
+    let body = json!([{
+        "title": "Test",
+        "comment": "comment test"
+    }]);
+
+    let url = "/indexes/wrong&name/documents?primaryKey=title";
+    let (response, status_code) = server.post_request(&url, body).await;
+    assert_eq!(status_code, 400);
+    assert_eq!(response["errorCode"], "invalid_index_uid");
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_add_documents_failed() {
+    let mut server = common::Server::with_uid("wrong&name");
+
+    let body = json!([{
+        "title": "Test",
+        "comment": "comment test"
+    }]);
+
+    let url = "/indexes/wrong&name/documents";
+    let (response, status_code) = server.post_request(&url, body).await;
+    assert_eq!(status_code, 400);
+    assert_eq!(response["errorCode"], "invalid_index_uid");
+
+    let (_, status_code) = server.get_index().await;
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_settings() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!({
+        "rankingRules": [
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
+            "desc(registered)",
+            "desc(age)",
+        ],
+        "distinctAttribute": "id",
+        "searchableAttributes": [
+            "id",
+            "name",
+            "color",
+            "gender",
+            "email",
+            "phone",
+            "address",
+            "registered",
+            "about"
+        ],
+        "displayedAttributes": [
+            "name",
+            "gender",
+            "email",
+            "registered",
+            "age",
+        ],
+        "stopWords": [
+            "ad",
+            "in",
+            "ut",
+        ],
+        "synonyms": {
+            "road": ["street", "avenue"],
+            "street": ["avenue"],
+        },
+        "attributesForFaceting": ["name"],
+    });
+
+    server.update_all_settings(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_settings_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!({
+        "rankingRules": [
+            "other",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
+            "desc(registered)",
+            "desc(age)",
+        ],
+        "distinctAttribute": "id",
+        "searchableAttributes": [
+            "id",
+            "name",
+            "color",
+            "gender",
+            "email",
+            "phone",
+            "address",
+            "registered",
+            "about"
+        ],
+        "displayedAttributes": [
+            "name",
+            "gender",
+            "email",
+            "registered",
+            "age",
+        ],
+        "stopWords": [
+            "ad",
+            "in",
+            "ut",
+        ],
+        "synonyms": {
+            "road": ["street", "avenue"],
+            "street": ["avenue"],
+        },
+        "anotherSettings": ["name"],
+    });
+
+    let (_, status_code) = server.update_all_settings_sync(body.clone()).await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_ranking_rules() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!([
+        "typo",
+        "words",
+        "proximity",
+        "attribute",
+        "wordsPosition",
+        "exactness",
+        "desc(registered)",
+        "desc(age)",
+    ]);
+
+    server.update_ranking_rules(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_ranking_rules_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!({
+        "rankingRules": 123,
+    });
+
+    let (_, status_code) = server.update_ranking_rules_sync(body.clone()).await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_distinct_attribute() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!("type");
+
+    server.update_distinct_attribute(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_distinct_attribute_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(123);
+
+    let (resp, status_code) = server.update_distinct_attribute_sync(body.clone()).await;
+    eprintln!("resp: {:?}", resp);
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (resp, status_code) = server.get_all_settings().await;
+    eprintln!("resp: {:?}", resp);
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_searchable_attributes() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(["title", "description"]);
+
+    server.update_searchable_attributes(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_searchable_attributes_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(123);
+
+    let (_, status_code) = server.update_searchable_attributes_sync(body.clone()).await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_displayed_attributes() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(["title", "description"]);
+
+    server.update_displayed_attributes(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_displayed_attributes_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(123);
+
+    let (_, status_code) = server.update_displayed_attributes_sync(body.clone()).await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_attributes_for_faceting() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(["title", "description"]);
+
+    server.update_attributes_for_faceting(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_attributes_for_faceting_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(123);
+
+    let (_, status_code) = server
+        .update_attributes_for_faceting_sync(body.clone())
+        .await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_synonyms() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!({
+        "road": ["street", "avenue"],
+        "street": ["avenue"],
+    });
+
+    server.update_synonyms(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_synonyms_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(123);
+
+    let (_, status_code) = server.update_synonyms_sync(body.clone()).await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_stop_words() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(["le", "la", "les"]);
+
+    server.update_stop_words(body.clone()).await;
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 200);
+}
+
+#[actix_rt::test]
+async fn create_index_lazy_by_sending_stop_words_with_error() {
+    let mut server = common::Server::with_uid("movies");
+    // 2 - Send the settings
+
+    let body = json!(123);
+
+    let (_, status_code) = server.update_stop_words_sync(body.clone()).await;
+    assert_eq!(status_code, 400);
+
+    // 3 - Get all settings and compare to the previous one
+
+    let (_, status_code) = server.get_all_settings().await;
+
+    assert_eq!(status_code, 404);
+}
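Taken together, the new test file pins the invariant behind lazy index creation (#914): a write to a missing index creates it only when the payload is valid, and every `_with_error` case finishes by checking that a rejected payload left no index behind (the 404 from `get_all_settings`). The same all-or-nothing rule, boiled down to a self-contained sketch with invented names:

```rust
use std::collections::HashMap;

#[derive(Default)]
struct Registry {
    indexes: HashMap<String, Vec<String>>, // uid -> applied settings
}

impl Registry {
    // Validate *before* creating, so a bad payload cannot leave an empty
    // index behind.
    fn apply_settings(
        &mut self,
        uid: &str,
        payload: Result<String, String>,
    ) -> Result<(), String> {
        let setting = payload?;
        self.indexes.entry(uid.to_string()).or_default().push(setting);
        Ok(())
    }
}

fn main() {
    let mut reg = Registry::default();
    // Invalid payload: rejected, and no index is created (the tests' 404).
    assert!(reg.apply_settings("movies", Err("rankingRules: 123".into())).is_err());
    assert!(!reg.indexes.contains_key("movies"));
    // Valid payload: the index is created on the fly (the tests' 200).
    assert!(reg.apply_settings("movies", Ok("distinctAttribute: id".into())).is_ok());
    assert!(reg.indexes.contains_key("movies"));
}
```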
@@ -6,6 +6,139 @@ use serde_json::Value;
 
 #[macro_use] mod common;
 
+#[actix_rt::test]
+async fn search() {
+    let mut server = common::Server::test_server().await;
+
+    let query = json! ({
+        "q": "exercitation"
+    });
+
+    let expected = json!([
+        {
+            "id": 1,
+            "balance": "$1,706.13",
+            "picture": "http://placehold.it/32x32",
+            "age": 27,
+            "color": "Green",
+            "name": "Cherry Orr",
+            "gender": "female",
+            "email": "cherryorr@chorizon.com",
+            "phone": "+1 (995) 479-3174",
+            "address": "442 Beverly Road, Ventress, New Mexico, 3361",
+            "about": "Exercitation officia mollit proident nostrud ea. Pariatur voluptate labore nostrud magna duis non elit et incididunt Lorem velit duis amet commodo. Irure in velit laboris pariatur. Do tempor ex deserunt duis minim amet.\r\n",
+            "registered": "2020-03-18T11:12:21 -01:00",
+            "latitude": -24.356932,
+            "longitude": 27.184808,
+            "tags": [
+                "new issue",
+                "bug"
+            ],
+            "isActive": true
+        },
+        {
+            "id": 59,
+            "balance": "$1,921.58",
+            "picture": "http://placehold.it/32x32",
+            "age": 31,
+            "color": "Green",
+            "name": "Harper Carson",
+            "gender": "male",
+            "email": "harpercarson@chorizon.com",
+            "phone": "+1 (912) 430-3243",
+            "address": "883 Dennett Place, Knowlton, New Mexico, 9219",
+            "about": "Exercitation minim esse proident cillum velit et deserunt incididunt adipisicing minim. Cillum Lorem consectetur laborum id consequat exercitation velit. Magna dolor excepteur sunt deserunt dolor ullamco non sint proident ipsum. Reprehenderit voluptate sit veniam consectetur ea sunt duis labore deserunt ipsum aute. Eiusmod aliqua anim voluptate id duis tempor aliqua commodo sunt. Do officia ea consectetur nostrud eiusmod laborum.\r\n",
+            "registered": "2019-12-07T07:33:15 -01:00",
+            "latitude": -60.812605,
+            "longitude": -27.129016,
+            "tags": [
+                "bug",
+                "new issue"
+            ],
+            "isActive": true
+        },
+        {
+            "id": 49,
+            "balance": "$1,476.39",
+            "picture": "http://placehold.it/32x32",
+            "age": 28,
+            "color": "brown",
+            "name": "Maureen Dale",
+            "gender": "female",
+            "email": "maureendale@chorizon.com",
+            "phone": "+1 (984) 538-3684",
+            "address": "817 Newton Street, Bannock, Wyoming, 1468",
+            "about": "Tempor mollit exercitation excepteur cupidatat reprehenderit ad ex. Nulla laborum proident incididunt quis. Esse laborum deserunt qui anim. Sunt incididunt pariatur cillum anim proident eu ullamco dolor excepteur. Ullamco amet culpa nostrud adipisicing duis aliqua consequat duis non eu id mollit velit. Deserunt ullamco amet in occaecat.\r\n",
+            "registered": "2018-04-26T06:04:40 -02:00",
+            "latitude": -64.196802,
+            "longitude": -117.396238,
+            "tags": [
+                "wontfix"
+            ],
+            "isActive": true
+        }
+    ]);
+
+    test_post_get_search!(server, query, |response, _status_code| {
+        let hits = response["hits"].as_array().unwrap();
+        let hits: Vec<Value> = hits.iter().cloned().take(3).collect();
+        assert_json_eq!(expected.clone(), serde_json::to_value(hits).unwrap(), ordered: false);
+    });
+}
+
+#[actix_rt::test]
+async fn search_no_params() {
+    let mut server = common::Server::test_server().await;
+
+    let query = json! ({});
+
+    // an empty search should return the 20 first indexed document
+    let dataset: Vec<Value> = serde_json::from_slice(include_bytes!("assets/test_set.json")).unwrap();
+    let expected: Vec<Value> = dataset.into_iter().take(20).collect();
+    let expected: Value = serde_json::to_value(expected).unwrap();
+
+    test_post_get_search!(server, query, |response, _status_code| {
+        assert_json_eq!(expected.clone(), response["hits"].clone(), ordered: false);
+    });
+}
+
+#[actix_rt::test]
+async fn search_in_unexisting_index() {
+    let mut server = common::Server::with_uid("test");
+
+    let query = json! ({
+        "q": "exercitation"
+    });
+
+    let expected = json! ({
+        "message": "Index test not found",
+        "errorCode": "index_not_found",
+        "errorType": "invalid_request_error",
+        "errorLink": "https://docs.meilisearch.com/errors#index_not_found"
+    });
+
+    test_post_get_search!(server, query, |response, status_code| {
+        assert_eq!(404, status_code);
+        assert_json_eq!(expected.clone(), response.clone(), ordered: false);
+    });
+}
+
+#[actix_rt::test]
+async fn search_unexpected_params() {
+
+    let query = json! ({"lol": "unexpected"});
+
+    let expected = "unknown field `lol`, expected one of `q`, `offset`, `limit`, `attributesToRetrieve`, `attributesToCrop`, `cropLength`, `attributesToHighlight`, `filters`, `matches`, `facetFilters`, `facetsDistribution` at line 1 column 6";
+
+    let post_query = serde_json::from_str::<meilisearch_http::routes::search::SearchQueryPost>(&query.clone().to_string());
+    assert!(post_query.is_err());
+    assert_eq!(expected.clone(), post_query.err().unwrap().to_string());
+
+    let get_query: Result<meilisearch_http::routes::search::SearchQuery, _> = serde_json::from_str(&query.clone().to_string());
+    assert!(get_query.is_err());
+    assert_eq!(expected.clone(), get_query.err().unwrap().to_string());
+}
+
 #[actix_rt::test]
 async fn search_with_limit() {
     let mut server = common::Server::test_server().await;
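`search_unexpected_params` leans on the query structs rejecting unknown keys, which is exactly what produces the `unknown field `lol`` message. serde's `deny_unknown_fields` attribute gives that behavior; a trimmed, self-contained reproduction (fields reduced, names assumed):

```rust
use serde::Deserialize;

// Trimmed stand-in for SearchQueryPost; only the attributes matter here.
#[allow(dead_code)]
#[derive(Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
struct SearchQuery {
    q: Option<String>,
    offset: Option<usize>,
    limit: Option<usize>,
}

fn main() {
    let err = serde_json::from_str::<SearchQuery>(r#"{"lol": "unexpected"}"#)
        .unwrap_err()
        .to_string();
    // serde names the offending key and lists the accepted ones.
    assert!(err.starts_with("unknown field `lol`"));
}
```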
@@ -468,3 +468,56 @@ async fn settings_that_contains_wildcard_is_wildcard() {
     assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "*");
     assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "*");
 }
+
+#[actix_rt::test]
+async fn test_displayed_attributes_field() {
+    let mut server = common::Server::test_server().await;
+
+    let body = json!({
+        "rankingRules": [
+            "typo",
+            "words",
+            "proximity",
+            "attribute",
+            "wordsPosition",
+            "exactness",
+            "desc(registered)",
+            "desc(age)",
+        ],
+        "distinctAttribute": "id",
+        "searchableAttributes": [
+            "id",
+            "name",
+            "color",
+            "gender",
+            "email",
+            "phone",
+            "address",
+            "registered",
+            "about"
+        ],
+        "displayedAttributes": [
+            "age",
+            "email",
+            "gender",
+            "name",
+            "registered",
+        ],
+        "stopWords": [
+            "ad",
+            "in",
+            "ut",
+        ],
+        "synonyms": {
+            "road": ["avenue", "street"],
+            "street": ["avenue"],
+        },
+        "attributesForFaceting": ["name"],
+    });
+
+    server.update_all_settings(body.clone()).await;
+
+    let (response, _status_code) = server.get_all_settings().await;
+
+    assert_json_eq!(body, response, ordered: true);
+}
@@ -1,13 +1,13 @@
 [package]
 name = "meilisearch-schema"
-version = "0.15.0"
+version = "0.16.0"
 license = "MIT"
 authors = ["Kerollmops <renault.cle@gmail.com>"]
 edition = "2018"
 
 [dependencies]
 indexmap = { version = "1.3.2", features = ["serde-1"] }
-meilisearch-error = { path = "../meilisearch-error", version = "0.15.0" }
+meilisearch-error = { path = "../meilisearch-error", version = "0.16.0" }
 serde = { version = "1.0.105", features = ["derive"] }
-serde_json = { version = "1.0.50", features = ["preserve_order"] }
+serde_json = { version = "1.0.59", features = ["preserve_order"] }
 zerocopy = "0.3.0"
@@ -25,10 +25,7 @@ impl<T> OptionAll<T> {
     }
 
     pub fn is_all(&self) -> bool {
-        match self {
-            OptionAll::All => true,
-            _ => false,
-        }
+        matches!(self, OptionAll::All)
    }
 }
@@ -1,6 +1,6 @@
 [package]
 name = "meilisearch-tokenizer"
-version = "0.15.0"
+version = "0.16.0"
 license = "MIT"
 authors = ["Kerollmops <renault.cle@gmail.com>"]
 edition = "2018"
@@ -1,6 +1,6 @@
 [package]
 name = "meilisearch-types"
-version = "0.15.0"
+version = "0.16.0"
 license = "MIT"
 authors = ["Clément Renault <renault.cle@gmail.com>"]
 edition = "2018"