Compare commits

...

20 Commits

Author SHA1 Message Date
Mubelotix
97ab85edc1 Remove dependencies 2025-06-26 16:15:28 +02:00
Mubelotix
c2259c018a Fix default-features and missing path dependencies 2025-06-25 13:57:43 +02:00
Mubelotix
35f774dfc9 Move all dependencies to workspace 2025-06-25 10:42:22 +02:00
Tamo
bd2bd0f33b Merge pull request #5697 from martin-g/documents-use-server-wait-task
tests: Use Server::wait_task() instead of Index::wait_task() in documents::
2025-06-23 16:33:21 +00:00
Tamo
e02733df4a Merge pull request #5698 from martin-g/index-use-server-wait-task
tests: Use Server::wait_task() instead of Index::wait_task() in index::
2025-06-23 16:31:40 +00:00
Tamo
f373ecc96a Merge pull request #5699 from martin-g/settings-use-server-wait-task
tests: Use Server::wait_task() instead of Index::wait_task() in settings::
2025-06-23 16:30:49 +00:00
Tamo
748a327271 Merge pull request #5700 from martin-g/search-use-server-wait-task
tests: Use Server::wait_task() instead of Index::wait_task() in search::
2025-06-23 16:29:53 +00:00
Louis Dureuil
43c4a229b7 Merge pull request #5692 from diksipav/5684-gemini-chat-completions-fix
Fix Gemini base_url when used with OpenAI clients
2025-06-23 09:03:34 +00:00
Martin Tzvetanov Grigorov
ca112a8b95 tests: Use Server::wait_task() instead of Index::wait_task() in index::
The code is mostly duplicated. Server::wait_task() has better error handling and more retries.

Signed-off-by: Martin Tzvetanov Grigorov <mgrigorov@apache.org>
2025-06-22 14:59:29 +03:00
Martin Tzvetanov Grigorov
855fa555a3 tests: Use Server::wait_task() instead of Index::wait_task() in search::
The code is mostly duplicated. Server::wait_task() has better error handling and more retries.

Signed-off-by: Martin Tzvetanov Grigorov <mgrigorov@apache.org>
2025-06-22 14:54:49 +03:00
Martin Tzvetanov Grigorov
a237c0797a tests: Use Server::wait_task() instead of Index::wait_task() in settings::
The code is mostly duplicated. Server::wait_task() has better error handling and more retries.

Signed-off-by: Martin Tzvetanov Grigorov <mgrigorov@apache.org>
2025-06-22 14:32:45 +03:00
Martin Tzvetanov Grigorov
5c46dc702a tests: Use Server::wait_task() instead of Index::wait_task()
The code is mostly duplicated. Server::wait_task() has better error handling and more retries.

Signed-off-by: Martin Tzvetanov Grigorov <mgrigorov@apache.org>
2025-06-22 14:22:59 +03:00
Dijana Pavlovic
c17031d3de Fix Gemini base_url when used with OpenAI clients 2025-06-19 15:11:37 +02:00
Tamo
fc6cc80705 Merge pull request #5689 from Mubelotix/main
Remove old dependencies
2025-06-19 08:11:55 +00:00
Mubelotix
138d20b277 Remove old dependencies 2025-06-18 16:46:20 +02:00
Louis Dureuil
7c1a9113f9 Merge pull request #5686 from meilisearch/upgrade-dependencies-again
Upgrade dependencies
2025-06-18 09:22:18 +00:00
Louis Dureuil
07ae297ffd Merge pull request #5681 from martin-g/faster-settings-prefix_search_settings-it-tests
tests: Faster settings::prefix_search_settings IT tests
2025-06-18 09:20:56 +00:00
Clément Renault
4069dbcfca Upgrade incompatible dependencies 2025-06-17 22:23:37 +02:00
Clément Renault
03eb50fbac Upgrade dependencies 2025-06-17 22:03:06 +02:00
Martin Tzvetanov Grigorov
056f18bd02 tests: Faster settings::prefix_search_settings IT tests
Use shared server + unique indices

Signed-off-by: Martin Tzvetanov Grigorov <mgrigorov@apache.org>
2025-06-16 23:20:11 +03:00
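Four of these commits apply the same mechanical migration across the integration-test suites (documents::, index::, settings::, search::): waiting on tasks through the shared Server instead of through each Index, since the two helpers were near-duplicates and the Server version retries more and reports errors better; the prefix_search_settings commit similarly switches to a shared server with unique indices. A minimal sketch of what such a consolidated helper can look like — the types, route, and timings here are illustrative stand-ins, not Meilisearch's actual test harness:

```rust
use std::time::Duration;

// Hypothetical stand-ins for the test-harness types.
#[derive(Debug, Clone)]
struct Task {
    uid: u64,
    status: String,
}

struct Server;

impl Server {
    // Poll the task until it leaves the enqueued/processing states,
    // sleeping briefly between retries.
    async fn wait_task(&self, uid: u64) -> Task {
        for _ in 0..100 {
            let task = self.get_task(uid).await;
            if task.status != "enqueued" && task.status != "processing" {
                return task;
            }
            tokio::time::sleep(Duration::from_millis(50)).await;
        }
        panic!("task {uid} did not complete in time");
    }

    async fn get_task(&self, uid: u64) -> Task {
        // In the real harness this would be a GET /tasks/{uid} request;
        // stubbed here so the sketch compiles on its own.
        Task { uid, status: "succeeded".into() }
    }
}

impl Task {
    // Mirrors the `.succeeded()` assertions seen throughout the diffs below.
    fn succeeded(self) -> Task {
        assert_eq!(self.status, "succeeded", "task {} failed", self.uid);
        self
    }
}

#[tokio::main]
async fn main() {
    let server = Server;
    server.wait_task(0).await.succeeded();
}
```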
49 changed files with 1139 additions and 1008 deletions

Cargo.lock (generated, 644 changed lines)

File diff suppressed because it is too large

View File

@@ -49,3 +49,147 @@ opt-level = 3
opt-level = 3
[profile.dev.package.roaring]
opt-level = 3
[workspace.dependencies]
actix-cors = "0.7.1"
actix-http = { version = "3.11.0", default-features = false }
actix-rt = "2.10.0"
actix-utils = "3.0.1"
actix-web = { version = "4.11.0", default-features = false }
actix-web-lab = { version = "0.24.1", default-features = false }
allocator-api2 = "0.3.0"
anyhow = "1.0.98"
arbitrary = "1.4.1"
arroy = "0.6.1"
async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "better-error-handling" }
base64 = "0.22.1"
bbqueue.git = "https://github.com/meilisearch/bbqueue"
big_s = "1.0.2"
bimap = "0.6.3"
bincode = "1.3.3"
brotli = "8.0.1"
bstr = "1.12.0"
bumpalo = "3.18.1"
bumparaw-collections = "0.1.4"
byte-unit = "5.1.6"
bytemuck = "1.23.1"
byteorder = "1.5.0"
bytes = "1.10.1"
candle-core = "0.9.1"
candle-nn = "0.9.1"
candle-transformers = "0.9.1"
cargo_toml = "0.22.1"
charabia = { version = "0.9.6", default-features = false }
clap = "4.5.40"
color-spantrace = "0.3.0"
concat-arrays = "0.1.2"
convert_case = "0.8.0"
criterion = "0.6.0"
crossbeam-channel = "0.5.15"
csv = "1.3.1"
derive_builder = "0.20.2"
deserr = "0.6.3"
either = "1.15.0"
enum-iterator = "2.1.0"
fastrand = "2.3.0"
flate2 = "1.1.2"
flume = { version = "0.11.1", default-features = false }
fst = "0.4.7"
futures = "0.3.31"
futures-util = "0.3.31"
fxhash = "0.2.1"
fxprof-processed-profile = "0.7.0"
geoutils = "0.5.1"
grenad = { version = "0.5.0", default-features = false }
hashbrown = "0.15.4"
heed = { version = "0.22.0", default-features = false }
hex = "0.4.3"
hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false }
hmac = "0.12.1"
http = "1.3.1"
indexmap = "2.9.0"
insta = "=1.39.0" # fixed version due to format breakages in v1.40
is-terminal = "0.4.16"
itertools = "0.14.0"
jsonwebtoken = "9.3.1"
lazy_static = "1.5.0"
levenshtein_automata = "0.2.1"
libproc = "0.14.10"
liquid = "0.26.11"
lru = "0.14.0"
manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
md5 = "0.7.0"
memchr = "2.7.5"
memmap2 = "0.9.5"
mimalloc = "0.1.47"
mime = "0.3.17"
mopa-maintained = "0.2.3"
nom = "7.1.3"
nom_locate = "4.2.0"
num_cpus = "1.17.0"
obkv = "0.3.0"
once_cell = "1.21.3"
ordered-float = "5.0.0"
page_size = "0.6.0"
parking_lot = "0.12.4"
pin-project-lite = "0.2.16"
platform-dirs = "0.3.0"
prometheus = "0.14.0"
rand = "0.8.5"
rand_chacha = "0.3.1"
rayon = "1.10.0"
regex = "1.11.1"
regex-lite = "0.1.6"
reqwest = { version = "0.12.20", default-features = false }
rhai = "1.22.2"
roaring = "0.10.12"
rstar = "0.12.2"
rustc-hash = "2.1.1"
rustls = { version = "0.23.28", default-features = false }
rustls-pemfile = "2.2.0"
rustls-pki-types = "1.12.0"
secrecy = "0.10.3"
segment = "0.2.6"
serde = "1.0.219"
serde-cs = "0.2.4"
serde_json = "1.0.140"
serde_urlencoded = "0.7.1"
sha-1 = "0.10.1"
sha2 = "0.10.9"
siphasher = "1.0.1"
slice-group-by = "0.3.1"
smallstr = "0.3.0"
smallvec = "1.15.1"
smartstring = "1.0.1"
static-files = "0.2.5"
synchronoise = "1.0.1"
sysinfo = "0.35.2"
tar = "0.4.44"
temp-env = "0.3.6"
tempfile = "3.20.0"
termcolor = "1.4.1"
thiserror = "2.0.12"
thread_local = "1.1.9"
tiktoken-rs = "0.7.0"
time = "0.3.41"
tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false }
tokio = "1.45.1"
toml = "0.8.23"
tracing = "0.1.41"
tracing-actix-web = "0.7.18"
tracing-error = "0.2.1"
tracing-subscriber = "0.3.19"
unescaper = "0.1.6"
ureq = "2.12.1"
url = "2.5.4"
urlencoding = "2.1.3"
utoipa = "5.4.0"
utoipa-scalar = "0.3.0"
uuid = "1.17.0"
vergen-git2 = "1.0.7"
wiremock = "0.6.3"
yaup = "0.3.1"
zip = "4.1.0"
cargo_metadata = "0.20.0"
futures-core = "0.3.31"
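This new [workspace.dependencies] table declares every version once at the workspace root; the per-crate manifests in the hunks below then inherit entries with `foo.workspace = true` (or the longer `{ workspace = true, features = [...] }` form when a crate needs extra features), while internal crates shrink to the dotted `milli.path = "../milli"` shorthand. A minimal sketch of the two sides of workspace inheritance, using a couple of the entries above:

```toml
# Workspace root Cargo.toml
[workspace.dependencies]
serde = "1.0.219"
serde_json = "1.0.140"

# A member crate's Cargo.toml
[dependencies]
serde = { workspace = true, features = ["derive"] } # inherit the version, add crate-local features
serde_json.workspace = true                         # plain inheritance, dotted shorthand
```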

View File

@@ -11,27 +11,27 @@ edition.workspace = true
license.workspace = true
[dependencies]
anyhow = "1.0.95"
bumpalo = "3.16.0"
csv = "1.3.1"
memmap2 = "0.9.5"
milli = { path = "../milli" }
mimalloc = { version = "0.1.43", default-features = false }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
tempfile = "3.15.0"
anyhow.workspace = true
bumpalo.workspace = true
csv.workspace = true
memmap2.workspace = true
milli.path = "../milli"
mimalloc.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
tempfile.workspace = true
[dev-dependencies]
criterion = { version = "0.5.1", features = ["html_reports"] }
rand = "0.8.5"
rand_chacha = "0.3.1"
roaring = "0.10.10"
criterion = { workspace = true, features = ["html_reports"] }
rand.workspace = true
rand_chacha.workspace = true
roaring.workspace = true
[build-dependencies]
anyhow = "1.0.95"
bytes = "1.9.0"
convert_case = "0.6.0"
flate2 = "1.0.35"
reqwest = { version = "0.12.15", features = ["blocking", "rustls-tls"], default-features = false }
anyhow.workspace = true
bytes.workspace = true
convert_case.workspace = true
flate2.workspace = true
reqwest = { workspace = true, features = ["blocking", "rustls-tls"], default-features = false }
[features]
default = ["milli/all-tokenizations"]

View File

@@ -67,7 +67,7 @@ fn main() -> anyhow::Result<()> {
writeln!(
&mut manifest_paths_file,
r#"pub const {}: &str = {:?};"#,
dataset.to_case(Case::ScreamingSnake),
dataset.to_case(Case::UpperSnake),
out_file.display(),
)?;
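This one-line build-script change presumably tracks the convert_case upgrade pulled in by the workspace migration (0.6 → 0.8): the dataset constants are now named via Case::UpperSnake, which produces the same SCREAMING_SNAKE output as the old variant. A quick check using convert_case's Casing trait (the dataset name is hypothetical):

```rust
use convert_case::{Case, Casing};

fn main() {
    // Hyphens and underscores both split into words before upcasing.
    assert_eq!("movies-dataset".to_case(Case::UpperSnake), "MOVIES_DATASET");
}
```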

View File

@@ -11,8 +11,8 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
time = { version = "0.3.37", features = ["parsing"] }
time = { workspace = true, features = ["parsing"] }
[build-dependencies]
anyhow = "1.0.95"
vergen-git2 = "1.0.2"
anyhow.workspace = true
vergen-git2.workspace = true

View File

@@ -11,24 +11,23 @@ readme.workspace = true
license.workspace = true
[dependencies]
anyhow = "1.0.95"
flate2 = "1.0.35"
http = "1.2.0"
meilisearch-types = { path = "../meilisearch-types" }
once_cell = "1.20.2"
regex = "1.11.1"
roaring = { version = "0.10.10", features = ["serde"] }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tracing = "0.1.41"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
flate2.workspace = true
http.workspace = true
meilisearch-types.path = "../meilisearch-types"
once_cell.workspace = true
regex.workspace = true
roaring = { workspace = true, features = ["serde"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["preserve_order"] }
tar.workspace = true
tempfile.workspace = true
thiserror.workspace = true
time = { workspace = true, features = ["serde-well-known", "formatting", "parsing", "macros"] }
tracing.workspace = true
uuid = { workspace = true, features = ["serde", "v4"] }
[dev-dependencies]
big_s = "1.0.2"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
meilisearch-types = { path = "../meilisearch-types" }
big_s.workspace = true
maplit.workspace = true
meili-snap.path = "../meili-snap"
meilisearch-types.path = "../meilisearch-types"

View File

@@ -11,7 +11,7 @@ edition.workspace = true
license.workspace = true
[dependencies]
tempfile = "3.15.0"
thiserror = "2.0.9"
tracing = "0.1.41"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
tempfile.workspace = true
thiserror.workspace = true
tracing.workspace = true
uuid = { workspace = true, features = ["serde", "v4"] }

View File

@@ -12,10 +12,9 @@ edition.workspace = true
license.workspace = true
[dependencies]
nom = "7.1.3"
nom_locate = "4.2.0"
unescaper = "0.1.5"
nom.workspace = true
nom_locate.workspace = true
unescaper.workspace = true
[dev-dependencies]
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
insta.workspace = true
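The pin that moves here is worth noting: `insta = "=1.39.0"` in the workspace table uses Cargo's exact-version operator, because a normal caret requirement would let `cargo update` pull in insta 1.40, whose snapshot-format changes break the committed snapshots. The distinction, as a manifest sketch:

```toml
[dev-dependencies]
# insta = "1.39.0"  # caret requirement: would allow any compatible 1.x, including 1.40
insta = "=1.39.0"   # exact requirement: stays on 1.39.0
```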

View File

@@ -13,10 +13,10 @@ edition.workspace = true
license.workspace = true
[dependencies]
serde_json = "1.0"
serde_json.workspace = true
[dev-dependencies]
criterion = { version = "0.5.1", features = ["html_reports"] }
criterion = { workspace = true, features = ["html_reports"] }
[[bench]]
name = "benchmarks"

View File

@@ -11,7 +11,7 @@ cargo-fuzz = true
[dependencies]
libfuzzer-sys = "0.4"
arbitrary-json = "0.1.1"
json-depth-checker = { path = "../../json-depth-checker" }
json-depth-checker.path = "../../json-depth-checker"
[dependencies.flatten-serde-json]
path = ".."

View File

@@ -11,12 +11,11 @@ edition.workspace = true
license.workspace = true
[dependencies]
arbitrary = { version = "1.4.1", features = ["derive"] }
bumpalo = "3.16.0"
clap = { version = "4.5.24", features = ["derive"] }
either = "1.13.0"
fastrand = "2.3.0"
milli = { path = "../milli" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
tempfile = "3.15.0"
arbitrary = { workspace = true, features = ["derive"] }
bumpalo.workspace = true
clap = { workspace = true, features = ["derive"] }
either.workspace = true
fastrand.workspace = true
milli.path = "../milli"
serde_json = { workspace = true, features = ["preserve_order"] }
tempfile.workspace = true

View File

@@ -11,44 +11,39 @@ edition.workspace = true
license.workspace = true
[dependencies]
anyhow = "1.0.95"
bincode = "1.3.3"
byte-unit = "5.1.6"
bumpalo = "3.16.0"
bumparaw-collections = "0.1.4"
convert_case = "0.6.0"
csv = "1.3.1"
derive_builder = "0.20.2"
dump = { path = "../dump" }
enum-iterator = "2.1.0"
file-store = { path = "../file-store" }
flate2 = "1.0.35"
indexmap = "2.7.0"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
memmap2 = "0.9.5"
page_size = "0.6.0"
rayon = "1.10.0"
roaring = { version = "0.10.10", features = ["serde"] }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.138", features = ["preserve_order"] }
synchronoise = "1.0.1"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
anyhow.workspace = true
byte-unit.workspace = true
bumpalo.workspace = true
convert_case.workspace = true
dump.path = "../dump"
enum-iterator.workspace = true
file-store.path = "../file-store"
flate2.workspace = true
indexmap.workspace = true
meilisearch-auth.path = "../meilisearch-auth"
meilisearch-types.path = "../meilisearch-types"
memmap2.workspace = true
page_size.workspace = true
rayon.workspace = true
roaring = { workspace = true, features = ["serde"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["preserve_order"] }
synchronoise.workspace = true
tempfile.workspace = true
thiserror.workspace = true
time = { workspace = true, features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tracing = "0.1.41"
ureq = "2.12.1"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
tracing.workspace = true
ureq.workspace = true
uuid = { workspace = true, features = ["serde", "v4"] }
[dev-dependencies]
big_s = "1.0.2"
crossbeam-channel = "0.5.15"
# fixed version due to format breakages in v1.40
insta = { version = "=1.39.0", features = ["json", "redactions"] }
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
big_s.workspace = true
crossbeam-channel.workspace = true
insta = { workspace = true, features = ["json", "redactions"] }
maplit.workspace = true
meili-snap.path = "../meili-snap"

View File

@@ -12,10 +12,10 @@ edition.workspace = true
license.workspace = true
[dependencies]
serde_json = "1.0"
serde_json.workspace = true
[dev-dependencies]
criterion = "0.5.1"
criterion.workspace = true
[[bench]]
name = "depth"

View File

@@ -11,9 +11,8 @@ edition.workspace = true
license.workspace = true
[dependencies]
# fixed version due to format breakages in v1.40
insta = { version = "=1.39.0", features = ["json", "redactions"] }
md5 = "0.7.0"
once_cell = "1.20"
regex-lite = "0.1.6"
uuid = { version = "1.17.0", features = ["v4"] }
insta = { workspace = true, features = ["json", "redactions"] }
md5.workspace = true
once_cell.workspace = true
regex-lite.workspace = true
uuid = { workspace = true, features = ["v4"] }

View File

@@ -11,16 +11,15 @@ edition.workspace = true
license.workspace = true
[dependencies]
base64 = "0.22.1"
enum-iterator = "2.1.0"
hmac = "0.12.1"
maplit = "1.0.2"
meilisearch-types = { path = "../meilisearch-types" }
rand = "0.8.5"
roaring = { version = "0.10.10", features = ["serde"] }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
sha2 = "0.10.8"
thiserror = "2.0.9"
time = { version = "0.3.37", features = ["serde-well-known", "formatting", "parsing", "macros"] }
uuid = { version = "1.11.0", features = ["serde", "v4"] }
base64.workspace = true
enum-iterator.workspace = true
hmac.workspace = true
maplit.workspace = true
meilisearch-types.path = "../meilisearch-types"
rand.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["preserve_order"] }
sha2.workspace = true
thiserror.workspace = true
time = { workspace = true, features = ["serde-well-known", "formatting", "parsing", "macros"] }
uuid = { workspace = true, features = ["serde", "v4"] }

View File

@@ -11,42 +11,39 @@ edition.workspace = true
license.workspace = true
[dependencies]
actix-web = { version = "4.9.0", default-features = false }
anyhow = "1.0.95"
bumpalo = "3.16.0"
bumparaw-collections = "0.1.4"
convert_case = "0.6.0"
csv = "1.3.1"
deserr = { version = "0.6.3", features = ["actix-web"] }
either = { version = "1.13.0", features = ["serde"] }
enum-iterator = "2.1.0"
file-store = { path = "../file-store" }
flate2 = "1.0.35"
fst = "0.4.7"
memmap2 = "0.9.5"
milli = { path = "../milli" }
roaring = { version = "0.10.10", features = ["serde"] }
rustc-hash = "2.1.0"
serde = { version = "1.0.217", features = ["derive"] }
serde-cs = "0.2.4"
serde_json = { version = "1.0.135", features = ["preserve_order"] }
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
actix-web = { workspace = true, default-features = false }
anyhow.workspace = true
bumpalo.workspace = true
bumparaw-collections.workspace = true
convert_case.workspace = true
csv.workspace = true
deserr = { workspace = true, features = ["actix-web"] }
enum-iterator.workspace = true
file-store.path = "../file-store"
flate2.workspace = true
fst.workspace = true
memmap2.workspace = true
milli.path = "../milli"
roaring = { workspace = true, features = ["serde"] }
rustc-hash.workspace = true
serde = { workspace = true, features = ["derive"] }
serde-cs.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
tar.workspace = true
tempfile.workspace = true
thiserror.workspace = true
time = { workspace = true, features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tokio = "1.43"
utoipa = { version = "5.3.1", features = ["macros"] }
uuid = { version = "1.11.0", features = ["serde", "v4"] }
utoipa = { workspace = true, features = ["macros"] }
uuid = { workspace = true, features = ["serde", "v4"] }
[dev-dependencies]
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
meili-snap = { path = "../meili-snap" }
insta.workspace = true
meili-snap.path = "../meili-snap"
[features]
# all specialized tokenizations

View File

@@ -154,7 +154,7 @@ impl ChatCompletionSource {
match self {
OpenAi => Some("https://api.openai.com/v1/"),
Mistral => Some("https://api.mistral.ai/v1/"),
Gemini => Some("https://generativelanguage.googleapis.com/v1beta/openai/"),
Gemini => Some("https://generativelanguage.googleapis.com/v1beta/openai"),
AzureOpenAi | VLlm => None,
}
}
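The one-character change above (as this diff renders it, the trailing slash is dropped) matters because OpenAI-compatible clients typically build request URLs by plain concatenation of the base URL with a path that already starts with `/`, so a trailing slash on the base yields a double slash that the Gemini endpoint evidently mishandles. A minimal illustration of that failure mode, assuming concatenation-style URL building rather than Meilisearch's actual client code:

```rust
// Naive join, as many OpenAI-compatible clients do internally.
fn join(base: &str, path: &str) -> String {
    format!("{base}{path}")
}

fn main() {
    let path = "/chat/completions";
    // Trailing slash on the base produces a malformed "//":
    assert_eq!(
        join("https://generativelanguage.googleapis.com/v1beta/openai/", path),
        "https://generativelanguage.googleapis.com/v1beta/openai//chat/completions"
    );
    // Without it, the URL is well-formed:
    assert_eq!(
        join("https://generativelanguage.googleapis.com/v1beta/openai", path),
        "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions"
    );
}
```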

View File

@@ -13,96 +13,84 @@ license.workspace = true
default-run = "meilisearch"
[dependencies]
actix-cors = "0.7.0"
actix-http = { version = "3.9.0", default-features = false, features = [
actix-cors.workspace = true
actix-http = { workspace = true, default-features = false, features = [
"compress-brotli",
"compress-gzip",
"rustls-0_23",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.9.0", default-features = false, features = [
actix-web = { workspace = true, default-features = false, features = [
"macros",
"compress-brotli",
"compress-gzip",
"cookies",
"rustls-0_23",
] }
anyhow = { version = "1.0.95", features = ["backtrace"] }
async-trait = "0.1.85"
bstr = "1.11.3"
byte-unit = { version = "5.1.6", features = ["serde"] }
bytes = "1.9.0"
bumpalo = "3.16.0"
clap = { version = "4.5.24", features = ["derive", "env"] }
crossbeam-channel = "0.5.15"
deserr = { version = "0.6.3", features = ["actix-web"] }
dump = { path = "../dump" }
either = "1.13.0"
file-store = { path = "../file-store" }
flate2 = "1.0.35"
fst = "0.4.7"
futures = "0.3.31"
futures-util = "0.3.31"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "2.7.0", features = ["serde"] }
is-terminal = "0.4.13"
itertools = "0.14.0"
jsonwebtoken = "9.3.0"
lazy_static = "1.5.0"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.43", default-features = false }
mime = "0.3.17"
num_cpus = "1.16.0"
obkv = "0.3.0"
once_cell = "1.20.2"
ordered-float = "4.6.0"
parking_lot = "0.12.3"
permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.16"
platform-dirs = "0.3.0"
prometheus = { version = "0.14.0", features = ["process"] }
rand = "0.8.5"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.12", features = [
anyhow = { workspace = true, features = ["backtrace"] }
bstr.workspace = true
byte-unit = { workspace = true, features = ["serde"] }
bytes.workspace = true
bumpalo.workspace = true
clap = { workspace = true, features = ["derive", "env"] }
crossbeam-channel.workspace = true
deserr = { workspace = true, features = ["actix-web"] }
dump.path = "../dump"
either.workspace = true
file-store.path = "../file-store"
flate2.workspace = true
futures.workspace = true
futures-util.workspace = true
index-scheduler.path = "../index-scheduler"
indexmap = { workspace = true, features = ["serde"] }
is-terminal.workspace = true
itertools.workspace = true
jsonwebtoken.workspace = true
lazy_static.workspace = true
meilisearch-auth.path = "../meilisearch-auth"
meilisearch-types.path = "../meilisearch-types"
mimalloc.workspace = true
mime.workspace = true
num_cpus.workspace = true
obkv.workspace = true
once_cell.workspace = true
permissive-json-pointer.path = "../permissive-json-pointer"
pin-project-lite.workspace = true
platform-dirs.workspace = true
prometheus = { workspace = true, features = ["process"] }
rand.workspace = true
regex.workspace = true
reqwest = { workspace = true, features = [
"rustls-tls",
"json",
], default-features = false }
rustls = { version = "0.23.20", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.10.1", features = ["alloc"] }
rustls-pemfile = "2.2.0"
segment = { version = "0.2.5" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
sha2 = "0.10.8"
siphasher = "1.0.1"
slice-group-by = "0.3.1"
static-files = { version = "0.2.4", optional = true }
sysinfo = "0.33.1"
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
rustls = { workspace = true, features = ["ring"], default-features = false }
rustls-pemfile.workspace = true
segment.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["preserve_order"] }
static-files = { workspace = true, optional = true }
sysinfo.workspace = true
tempfile.workspace = true
thiserror.workspace = true
time = { workspace = true, features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tokio = { version = "1.43.1", features = ["full"] }
toml = "0.8.19"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["json"] }
tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.15"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.10"
mopa-maintained = "0.2.3"
utoipa = { version = "5.3.1", features = [
tokio = { workspace = true, features = ["full"] }
toml.workspace = true
uuid = { workspace = true, features = ["serde", "v4"] }
serde_urlencoded.workspace = true
termcolor.workspace = true
url = { workspace = true, features = ["serde"] }
tracing.workspace = true
tracing-subscriber = { workspace = true, features = ["json"] }
tracing-trace.path = "../tracing-trace"
tracing-actix-web.workspace = true
build-info.path = "../build-info"
roaring.workspace = true
utoipa = { workspace = true, features = [
"actix_extras",
"macros",
"non_strict_integers",
@@ -111,36 +99,35 @@ utoipa = { version = "5.3.1", features = [
"time",
"openapi_extensions",
] }
utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
utoipa-scalar = { workspace = true, optional = true, features = ["actix-web"] }
async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "better-error-handling" }
secrecy = "0.10.3"
actix-web-lab = { version = "0.24.1", default-features = false }
secrecy.workspace = true
actix-web-lab = { workspace = true, default-features = false }
[dev-dependencies]
actix-rt = "2.10.0"
brotli = "6.0.0"
# fixed version due to format breakages in v1.40
insta = { version = "=1.39.0", features = ["redactions"] }
manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.2"
yaup = "0.3.1"
actix-rt.workspace = true
brotli.workspace = true
insta = { workspace = true, features = ["redactions"] }
manifest-dir-macros.workspace = true
maplit.workspace = true
meili-snap.path = "../meili-snap"
temp-env.workspace = true
urlencoding.workspace = true
wiremock.workspace = true
yaup.workspace = true
[build-dependencies]
anyhow = { version = "1.0.95", optional = true }
cargo_toml = { version = "0.21.0", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.12.12", features = [
anyhow = { workspace = true, optional = true }
cargo_toml = { workspace = true, optional = true }
hex = { workspace = true, optional = true }
reqwest = { workspace = true, features = [
"blocking",
"rustls-tls",
], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.4", optional = true }
tempfile = { version = "3.15.0", optional = true }
zip = { version = "2.3.0", optional = true }
sha-1 = { workspace = true, optional = true }
static-files = { workspace = true, optional = true }
tempfile = { workspace = true, optional = true }
zip = { workspace = true, optional = true }
[features]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]

View File

@@ -293,7 +293,7 @@ async fn add_csv_document() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -358,7 +358,7 @@ async fn add_csv_document_with_types() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -434,7 +434,7 @@ async fn add_csv_document_with_custom_delimiter() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -991,7 +991,7 @@ async fn add_documents_no_index_creation() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@@ -1068,7 +1068,7 @@ async fn document_addition_with_primary_key() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
snapshot!(code, @"200 OK");
@@ -1120,7 +1120,7 @@ async fn document_addition_with_huge_int_primary_key() {
let (response, code) = index.add_documents(documents, Some("primary")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response,
@r###"
{
@@ -1178,7 +1178,7 @@ async fn replace_document() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -1190,7 +1190,7 @@ async fn replace_document() {
let (task, code) = index.add_documents(documents, None).await;
snapshot!(code,@"202 Accepted");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
@@ -1362,7 +1362,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1399,7 +1399,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1436,7 +1436,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1478,7 +1478,7 @@ async fn error_add_documents_missing_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -1527,7 +1527,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@@ -1576,7 +1576,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@@ -1611,7 +1611,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@@ -1660,7 +1660,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@@ -1705,7 +1705,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@@ -1741,7 +1741,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@@ -1790,7 +1790,7 @@ async fn add_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@@ -1914,7 +1914,7 @@ async fn update_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@@ -1983,7 +1983,7 @@ async fn update_documents_with_geo_field() {
}
]);
let (task, _status_code) = index.update_documents(updated_documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2097,7 +2097,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }),
@@ -2135,7 +2135,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2173,7 +2173,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2211,7 +2211,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2249,7 +2249,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2287,7 +2287,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2325,7 +2325,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2363,7 +2363,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2401,7 +2401,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2439,7 +2439,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2477,7 +2477,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2515,7 +2515,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2556,7 +2556,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2593,7 +2593,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2630,7 +2630,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2674,7 +2674,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.succeeded();
let ret = server.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@@ -2697,7 +2697,7 @@ async fn add_invalid_geo_and_then_settings() {
let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.failed();
let ret = server.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@@ -2765,7 +2765,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@@ -2806,7 +2806,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@@ -2845,7 +2845,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@@ -2884,12 +2884,12 @@ async fn add_documents_with_primary_key_twice() {
]);
let (task, _status_code) = index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (task, _status_code) = index.add_documents(documents, Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@@ -2922,7 +2922,7 @@ async fn batch_several_documents_addition() {
// wait first batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
// run a second completely failing batch
@@ -2936,7 +2936,7 @@ async fn batch_several_documents_addition() {
// wait second batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await;

View File

@@ -5,11 +5,12 @@ use crate::json;
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@@ -19,7 +20,7 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
}
#[actix_rt::test]
@@ -28,10 +29,10 @@ async fn delete_one_document() {
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@@ -44,7 +45,7 @@ async fn clear_all_documents_unexisting_index() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@@ -57,11 +58,11 @@ async fn clear_all_documents() {
None,
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@@ -72,11 +73,11 @@ async fn clear_all_documents_empty_index() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@@ -95,7 +96,7 @@ async fn error_delete_batch_unexisting_index() {
});
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}
@@ -104,11 +105,11 @@ async fn delete_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@@ -120,11 +121,11 @@ async fn delete_no_document_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");
let _update = index.wait_task(response.uid()).await.succeeded();
let _update = server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@@ -146,7 +147,7 @@ async fn delete_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (stats, _) = index.stats().await;
snapshot!(json_string!(stats, {
@@ -180,7 +181,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -253,7 +254,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -328,7 +329,7 @@ async fn delete_document_by_complex_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.delete_document_by_filter(
json!({ "filter": ["color != red", "color != green", "color EXISTS"] }),
@@ -345,7 +346,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -404,7 +405,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",

View File

@@ -23,7 +23,7 @@ async fn error_get_unexisting_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(1, None).await;
@@ -43,7 +43,7 @@ async fn get_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
@@ -52,7 +52,7 @@ async fn get_document() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@@ -276,7 +276,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
@@ -293,7 +293,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
assert_eq!(code, 200);
@@ -369,7 +369,7 @@ async fn get_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({})).await;
let (response2, code2) = index.get_all_documents_raw("").await;
@@ -525,7 +525,7 @@ async fn get_document_by_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.fetch_documents(json!({
@@ -651,7 +651,7 @@ async fn get_document_invalid_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", "illegal/docid"] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,illegal/docid").await;
@@ -683,7 +683,7 @@ async fn get_document_not_found_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", 3, 42] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,3,42").await;
@@ -726,7 +726,7 @@ async fn get_document_by_ids_and_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.fetch_documents(json!({"ids": [2], "filter": "color = blue" })).await;
@@ -854,7 +854,7 @@ async fn get_document_with_vectors() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// by default you shouldn't see the `_vectors` object
let (documents, _code) = index.get_all_documents(Default::default()).await;

View File

@@ -34,7 +34,7 @@ async fn document_update_with_primary_key() {
let (response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@@ -63,7 +63,7 @@ async fn update_document() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -75,7 +75,7 @@ async fn update_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@@ -107,7 +107,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -119,7 +119,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@@ -142,7 +142,7 @@ async fn update_larger_dataset() {
let index = server.unique_index();
let documents = serde_json::from_str(include_str!("../assets/test_set.json")).unwrap();
let (task, _code) = index.update_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentAdditionOrUpdate");
@@ -166,7 +166,7 @@ async fn error_update_documents_bad_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
@@ -194,7 +194,7 @@ async fn error_update_documents_missing_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(
response["error"]["message"],
@@ -219,7 +219,7 @@ async fn update_faceted_document() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents: Vec<_> = (0..1000)
.map(|id| {
@@ -233,7 +233,7 @@ async fn update_faceted_document() {
let (response, code) = index.add_documents(documents.into(), None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -245,7 +245,7 @@ async fn update_faceted_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"limit": 10}), |response, code| {


@@ -17,7 +17,7 @@ async fn create_index_no_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -34,7 +34,7 @@ async fn create_index_with_gzip_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -83,7 +83,7 @@ async fn create_index_with_zlib_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -100,7 +100,7 @@ async fn create_index_with_brotli_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -117,7 +117,7 @@ async fn create_index_with_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -132,7 +132,7 @@ async fn create_index_with_invalid_primary_key() {
let index = server.unique_index();
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -142,7 +142,7 @@ async fn create_index_with_invalid_primary_key() {
let (response, code) = index.add_documents(documents, Some("id")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -181,7 +181,7 @@ async fn error_create_existing_index() {
let (task, _) = index.create(Some("primary")).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
let msg = format!(
"Index `{}` already exists.",
task["indexUid"].as_str().expect("indexUid should exist").trim_matches('"')


@@ -9,7 +9,7 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 200);
@@ -17,18 +17,19 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 404);
}
#[actix_rt::test]
async fn error_delete_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_index_fail().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",
@@ -37,7 +38,7 @@ async fn error_delete_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}
@@ -58,7 +59,7 @@ async fn loop_delete_add_documents() {
}
for task in tasks {
let response = index.wait_task(task).await.succeeded();
let response = server.wait_task(task).await.succeeded();
assert_eq!(response["status"], "succeeded", "{}", response);
}
}
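The loop above shows the batch variant of the same pattern: enqueue several tasks first, then wait on each uid afterwards. A hedged sketch of that shape, where `document_batches` is a hypothetical iterator of payloads:

// Enqueue everything up front, then drain the uids through the
// server-scoped helper so every task is checked for success.
let mut tasks = Vec::new();
for batch in document_batches {
    let (task, _code) = index.add_documents(batch, None).await;
    tasks.push(task.uid());
}
for uid in tasks {
    let response = server.wait_task(uid).await.succeeded();
    assert_eq!(response["status"], "succeeded", "{}", response);
}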


@@ -12,7 +12,7 @@ async fn create_and_get_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get().await;


@@ -10,7 +10,7 @@ async fn stats() {
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.stats().await;
@@ -33,7 +33,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.stats().await;
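For context on what this test asserts against: the index stats endpoint (GET /indexes/{uid}/stats) reports document counts and indexing state. A hedged sketch of the assertions, with illustrative values only:

// Sketch: numberOfDocuments and isIndexing are part of the stats payload.
let (response, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(response["isIndexing"], false);
assert!(response["numberOfDocuments"].as_u64().is_some());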


@@ -12,10 +12,10 @@ async fn update_primary_key() {
let (task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get().await;
@@ -42,12 +42,12 @@ async fn create_and_update_with_different_encoding() {
let (create_task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();
server.wait_task(create_task.uid()).await.succeeded();
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@@ -58,23 +58,24 @@ async fn update_nothing() {
assert_eq!(code, 202);
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (task2, code) = index.update(None).await;
assert_eq!(code, 202);
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
}
#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new_shared();
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(update_task.uid()).await.failed();
let response = server.wait_task(update_task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
@@ -88,12 +89,13 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),


@@ -152,7 +152,7 @@ async fn distinct_search_with_offset_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@@ -211,7 +211,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@@ -281,7 +281,7 @@ async fn distinct_at_search_time() {
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await.succeeded();
let task = server.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");
fn get_hits(response: &Value) -> Vec<String> {


@@ -425,7 +425,7 @@ async fn search_non_filterable_facets() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
// Wait for the settings update to complete
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@@ -456,7 +456,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
let index = server.unique_index();
let (response, _code) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@@ -486,7 +486,7 @@ async fn search_non_filterable_facets_no_filterable() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"filterableAttributes": []})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@@ -517,7 +517,7 @@ async fn search_non_filterable_facets_multiple_facets() {
let index = server.unique_index();
let (response, _uid) =
index.update_settings(json!({"filterableAttributes": ["title", "genres"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"facets": ["doggo", "neko"]})).await;
snapshot!(code, @"400 Bad Request");
@@ -1001,7 +1001,7 @@ async fn sort_geo_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
@@ -1028,7 +1028,7 @@ async fn sort_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
@@ -1054,7 +1054,7 @@ async fn sort_unsortable_attribute() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `title` is not sortable. Available sortable attributes are: `id`.", index.uid),
@@ -1081,7 +1081,7 @@ async fn sort_invalid_syntax() {
let index = server.unique_index();
let (response, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
@@ -1112,7 +1112,7 @@ async fn sort_unset_ranking_rule() {
json!({"sortableAttributes": ["title"], "rankingRules": ["proximity", "exactness"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", index.uid),
@@ -1199,7 +1199,7 @@ async fn distinct_at_search_time() {
let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@@ -1214,7 +1214,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
@@ -1229,7 +1229,7 @@ async fn distinct_at_search_time() {
"###);
let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;


@@ -50,11 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@@ -70,11 +70,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@@ -94,7 +94,7 @@ async fn simple_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -207,10 +207,10 @@ async fn simple_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;
@@ -228,7 +228,7 @@ async fn advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "enabled": false })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@@ -252,7 +252,7 @@ async fn more_advanced_facet_search() {
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "disableOnWords": ["adventre"] })).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@@ -276,7 +276,7 @@ async fn simple_facet_search_with_max_values() {
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -298,7 +298,7 @@ async fn simple_facet_search_by_count_with_max_values() {
.await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -314,7 +314,7 @@ async fn non_filterable_facet_search_error() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -333,7 +333,7 @@ async fn facet_search_dont_support_words() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;
@@ -351,7 +351,7 @@ async fn simple_facet_search_with_sort_by_count() {
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -370,7 +370,7 @@ async fn add_documents_and_deactivate_facet_search() {
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
@@ -378,7 +378,7 @@ async fn add_documents_and_deactivate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -406,10 +406,10 @@ async fn deactivate_facet_search_and_add_documents() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -437,10 +437,10 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -448,7 +448,7 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -469,10 +469,10 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -480,7 +480,7 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@@ -920,13 +920,13 @@ async fn distinct_facet_search_on_movies() {
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;


@@ -90,7 +90,7 @@ async fn search_with_contains_filter() {
let documents = DOCUMENTS.clone();
let (request, _code) = index.add_documents(documents, None).await;
index.wait_task(request.uid()).await.succeeded();
server.wait_task(request.uid()).await.succeeded();
let (response, code) = index
.search_post(json!({
@@ -257,7 +257,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index
.update_settings(json!({"filterableAttributes": [{
@@ -269,7 +269,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@@ -334,7 +334,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@@ -445,7 +445,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter returns an error
index
@@ -544,7 +544,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index


@@ -26,7 +26,7 @@ async fn search_formatted_from_sdk() {
{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy" }
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await;
index
.search(
@@ -65,7 +65,7 @@ async fn formatted_contain_wildcard() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index.search(json!({ "q": "pésti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
|response, code|
@@ -398,7 +398,7 @@ async fn displayedattr_2_smol() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
@@ -596,7 +596,7 @@ async fn test_cjk_highlight() {
{ "id": 1, "title": "大卫到了扫罗那里" },
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"q": "", "attributesToHighlight": ["title"]}), |response, code| {


@@ -17,11 +17,11 @@ async fn index_with_documents_user_provided<'a>(
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
}
@@ -37,11 +37,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Val
}}} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
}
@@ -543,7 +543,7 @@ async fn distinct_is_applied() {
let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// pure keyword
let (response, code) = index
@@ -633,7 +633,7 @@ async fn retrieve_vectors() {
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(
@@ -683,7 +683,7 @@ async fn retrieve_vectors() {
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(


@@ -99,7 +99,7 @@ async fn simple_search() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// english
index
@@ -215,7 +215,7 @@ async fn force_locales() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@@ -293,7 +293,7 @@ async fn force_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@@ -369,7 +369,7 @@ async fn force_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese
index
@@ -444,7 +444,7 @@ async fn force_different_locales_with_pattern() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// force chinese
index
@@ -522,7 +522,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// auto infer any language
index
@@ -596,7 +596,7 @@ async fn auto_infer_locales_at_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -695,7 +695,7 @@ async fn force_different_locales_with_pattern_nested() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// chinese
index
@@ -773,7 +773,7 @@ async fn settings_change() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.update_settings(json!({
"searchableAttributes": ["document_en", "document_ja", "document_zh"],
@@ -792,7 +792,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// chinese
index
@@ -855,7 +855,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// chinese
index
@@ -910,7 +910,7 @@ async fn invalid_locales() {
)
.await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "Atta", "locales": ["invalid"]})).await;
snapshot!(code, @"400 Bad Request");
@@ -1028,7 +1028,7 @@ async fn simple_facet_search() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "進撃", "locales": ["cmn"]}))
@@ -1090,7 +1090,7 @@ async fn facet_search_with_localized_attributes() {
}
"###);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "进击", "locales": ["cmn"]}))
@@ -1159,7 +1159,7 @@ async fn swedish_search() {
]
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
// infer swedish
index
@@ -1280,7 +1280,7 @@ async fn german_search() {
]
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
// infer german
index


@@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}
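This small helper is the fixture pattern for the whole suite: one shared server process, a uniquely named index per test, documents indexed up front, so tests can run concurrently without cross-talk. A hedged usage sketch, assuming the suite's DOCUMENTS fixture:

// Sketch: each test gets an isolated index on the shared server.
let server = Server::new_shared();
let index = index_with_documents(server, &DOCUMENTS).await;
index
    .search(json!({"q": "dragon"}), |response, code| {
        assert_eq!(code, 200, "{}", response);
    })
    .await;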


@@ -38,11 +38,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
@@ -51,11 +51,11 @@ async fn test_settings_documents_indexing_swapping_and_search(
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index.search(query.clone(), test.clone()).await;
}
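The helper above locks in order-independence: the same query must return the same results whether the documents land before the settings change or after it. A hedged usage sketch, assuming the argument order visible in the body (documents, settings, query, then an assertion closure):

// Sketch: one call covers both indexing orders.
test_settings_documents_indexing_swapping_and_search(
    &json!([{ "id": 1, "title": "Batman" }]),
    &json!({ "searchableAttributes": ["title"] }),
    &json!({ "q": "batman" }),
    |response, code| {
        assert_eq!(code, 200, "{}", response);
        assert_eq!(response["hits"].as_array().unwrap().len(), 1);
    },
)
.await;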
@@ -104,7 +104,7 @@ async fn bug_5547() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let mut documents = Vec::new();
for i in 0..65_535 {
@@ -112,7 +112,7 @@ async fn bug_5547() {
}
let (response, _code) = index.add_documents(json!(documents), Some("id")).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "title"})).await;
assert_eq!(code, 200);
snapshot!(response["hits"], @r###"[{"id":0,"title":"title0"},{"id":1,"title":"title1"},{"id":10,"title":"title10"},{"id":100,"title":"title100"},{"id":101,"title":"title101"},{"id":102,"title":"title102"},{"id":103,"title":"title103"},{"id":104,"title":"title104"},{"id":105,"title":"title105"},{"id":106,"title":"title106"},{"id":107,"title":"title107"},{"id":108,"title":"title108"},{"id":1000,"title":"title1000"},{"id":1001,"title":"title1001"},{"id":1002,"title":"title1002"},{"id":1003,"title":"title1003"},{"id":1004,"title":"title1004"},{"id":1005,"title":"title1005"},{"id":1006,"title":"title1006"},{"id":1007,"title":"title1007"}]"###);
@@ -131,7 +131,7 @@ async fn search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// prefix search
index
@@ -196,7 +196,7 @@ async fn search_with_typo_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "287947" }), |response, code| {
@@ -228,7 +228,7 @@ async fn phrase_search_with_stop_word() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "how \"to\" train \"the" }), |response, code| {
@@ -308,11 +308,11 @@ async fn negative_special_cases_search() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) =
index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// There is a synonym for escape -> glass but we don't want "escape", only the derivatives: glass
index
@@ -338,7 +338,7 @@ async fn test_kanji_language_detection() {
{ "id": 2, "title": "הַשּׁוּעָל הַמָּהִיר (״הַחוּם״) לֹא יָכוֹל לִקְפֹּץ 9.94 מֶטְרִים, נָכוֹן? ברר, 1.5°C- בַּחוּץ!" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "東京"}), |response, code| {
@@ -361,10 +361,10 @@ async fn test_thai_language() {
{ "id": 2, "title": "สบู่สมุนไพรฝางแดงผสมว่านหางจรเข้ 100 กรัม จำนวน 6 ก้อน" }
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update_settings(json!({"rankingRules": ["exactness"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "สบู"}), |response, code| {
@@ -586,7 +586,7 @@ async fn displayed_attributes() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
@@ -601,7 +601,7 @@ async fn placeholder_search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -630,7 +630,7 @@ async fn placeholder_search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -665,7 +665,7 @@ async fn search_is_hard_limited() {
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -696,7 +696,7 @@ async fn search_is_hard_limited() {
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -735,7 +735,7 @@ async fn faceting_max_values_per_facet() {
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -752,7 +752,7 @@ async fn faceting_max_values_per_facet() {
let (task, _status_code) =
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -1033,7 +1033,7 @@ async fn test_degraded_score_details() {
index.add_documents(json!(documents), None).await;
// We can't really use anything else than 0ms here; otherwise, the test will get flaky.
let (res, _code) = index.update_settings(json!({ "searchCutoffMs": 0 })).await;
index.wait_task(res.uid()).await.succeeded();
server.wait_task(res.uid()).await.succeeded();
index
.search(
@@ -1126,7 +1126,7 @@ async fn camelcased_words() {
{ "id": 4, "title": "testab" },
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "deLonghi"}), |response, code| {
@@ -1345,12 +1345,12 @@ async fn simple_search_with_strange_synonyms() {
let (task, _status_code) =
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "How to train"}), |response, code| {
@@ -1416,11 +1416,11 @@ async fn change_attributes_settings() {
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task,_status_code) =
index.update_settings(json!({ "searchableAttributes": ["father", "mother", "doggos"], "filterableAttributes": ["doggos"] })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// search
index
@@ -1923,7 +1923,7 @@ async fn change_facet_casing() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(
@@ -1936,7 +1936,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
.add_documents(
@@ -1949,7 +1949,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "facets": ["dog"] }), |response, code| {
@@ -2062,7 +2062,7 @@ async fn simple_search_changing_unrelated_settings() {
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Dragon"}), |response, code| {
@@ -2084,7 +2084,7 @@ async fn simple_search_changing_unrelated_settings() {
let (task, _status_code) =
index.update_settings(json!({ "filterableAttributes": ["title"] })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index
@@ -2106,7 +2106,7 @@ async fn simple_search_changing_unrelated_settings() {
.await;
let (task, _status_code) = index.update_settings(json!({ "filterableAttributes": [] })).await;
let r = index.wait_task(task.uid()).await.succeeded();
let r = server.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
index


@@ -21,7 +21,7 @@ pub async fn shared_movies_index() -> &'static Index<'static, Shared> {
let documents = DOCUMENTS.clone();
let (response, _code) = movies_index.add_documents(documents, None).await;
movies_index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (value, _) = movies_index
.update_settings(json!({
@@ -37,7 +37,7 @@ pub async fn shared_movies_index() -> &'static Index<'static, Shared> {
]
}))
.await;
movies_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
movies_index.to_shared()
})
.await
@@ -52,7 +52,7 @@ pub async fn shared_batman_index() -> &'static Index<'static, Shared> {
let documents = SCORE_DOCUMENTS.clone();
let (response, _code) = batman_index.add_documents(documents, None).await;
batman_index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (value, _) = batman_index
.update_settings(json!({
@@ -68,7 +68,7 @@ pub async fn shared_batman_index() -> &'static Index<'static, Shared> {
]
}))
.await;
batman_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
batman_index.to_shared()
})
.await
@@ -1085,14 +1085,14 @@ async fn federation_filter() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@@ -1152,7 +1152,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@@ -1167,7 +1167,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@@ -1365,7 +1365,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@@ -1380,7 +1380,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@@ -1424,7 +1424,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@@ -1439,7 +1439,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// return mothers and fathers ordered across fields.
let (response, code) = server
@@ -1638,7 +1638,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@@ -1653,7 +1653,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@@ -3048,14 +3048,14 @@ async fn federation_invalid_weight() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@@ -3082,14 +3082,14 @@ async fn federation_null_weight() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@@ -3150,7 +3150,7 @@ async fn federation_federated_contains_pagination() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// fail when a federated query contains "limit"
let (response, code) = server
@@ -3230,11 +3230,11 @@ async fn federation_federated_contains_facets() {
)
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// empty facets are actually OK
let (response, code) = server
@@ -3314,7 +3314,7 @@ async fn federation_non_faceted_for_an_index() {
)
.await;
fruits_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let fruits_no_name_index = server.unique_index_with_prefix("fruits-no-name");
@@ -3324,18 +3324,18 @@ async fn federation_non_faceted_for_an_index() {
)
.await;
fruits_no_name_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let fruits_no_facets_index = server.unique_index_with_prefix("fruits-no-facets");
let (value, _) =
fruits_no_facets_index.update_settings(json!({"searchableAttributes": ["name"]})).await;
fruits_no_facets_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = fruits_no_facets_index.add_documents(documents, None).await;
fruits_no_facets_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// fails
let (response, code) = server
@@ -3435,7 +3435,7 @@ async fn federation_non_federated_contains_federation_option() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// fail when a non-federated query contains "federationOptions"
let (response, code) = server
@@ -3473,12 +3473,12 @@ async fn federation_vector_single_index() {
}
}}))
.await;
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// same embedder
let (response, code) = server
@@ -3670,12 +3670,12 @@ async fn federation_vector_two_indexes() {
},
}}))
.await;
vectors_animal_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = vectors_animal_index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
vectors_animal_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let vectors_sentiment_index = server.unique_index_with_prefix("vectors-sentiment");
@@ -3687,12 +3687,12 @@ async fn federation_vector_two_indexes() {
}
}}))
.await;
vectors_sentiment_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = vectors_sentiment_index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
vectors_sentiment_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@@ -4154,7 +4154,7 @@ async fn federation_facets_different_indexes_same_facet() {
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = batman_2_index.add_documents(documents, None).await;
batman_2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = batman_2_index
.update_settings(json!({
@@ -4170,7 +4170,7 @@ async fn federation_facets_different_indexes_same_facet() {
]
}))
.await;
batman_2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// return titles ordered across indexes
let (response, code) = server
@@ -4677,7 +4677,7 @@ async fn federation_facets_same_indexes() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = doggos_index.add_documents(documents, None).await;
doggos_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = doggos_index
.update_settings(json!({
@@ -4692,13 +4692,13 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
doggos_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let doggos2_index = server.unique_index_with_prefix("doggos_2");
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = doggos2_index.add_documents(documents, None).await;
doggos2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = doggos2_index
.update_settings(json!({
@@ -4713,7 +4713,7 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
doggos2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {
@@ -4980,7 +4980,7 @@ async fn federation_inconsistent_merge_order() {
let documents = DOCUMENTS.clone();
let (value, _) = movies2_index.add_documents(documents, None).await;
movies2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let (value, _) = movies2_index
.update_settings(json!({
@@ -4999,7 +4999,7 @@ async fn federation_inconsistent_merge_order() {
}
}))
.await;
movies2_index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
let batman_index = shared_batman_index().await;
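The hunks above all make the same substitution: tests stop polling the task through the index handle and poll through the server instead. A minimal sketch of the pattern, assuming this suite's `common::Server` helpers and its `json!` macro (the call sites appear in the diff; the helper definitions do not):

```rust
use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn waits_at_the_server_level() {
    // One shared instance across tests, one unique index per test.
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _code) = index.add_documents(json!([{ "id": 1 }]), None).await;

    // Before: index.wait_task(task.uid()).await.succeeded();
    // After: task uids are global to the instance, so the server can poll
    // the task no matter which index enqueued it.
    server.wait_task(task.uid()).await.succeeded();
}
```

Funneling every wait through one method on `Server` also means there is a single place to change if the polling strategy ever needs tuning.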

@@ -114,14 +114,14 @@ async fn ensure_placeholder_search_hit_count_valid() {
}
]);
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index
.update_settings(
json!({ "rankingRules": ["distinct:asc"], "distinctAttribute": "distinct"}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
for page in 0..=4 {
index

@@ -9,7 +9,7 @@ async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value)
let index = server.unique_index();
let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
}
@@ -65,7 +65,7 @@ async fn search_no_searchable_attribute_set() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -78,7 +78,7 @@ async fn search_no_searchable_attribute_set() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -109,7 +109,7 @@ async fn search_on_all_attributes_restricted_set() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@@ -194,7 +194,7 @@ async fn word_ranking_rule_order_exact_words() {
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// simple search should return 2 documents (ids: 2 and 3).
index
@@ -360,7 +360,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
.search(json!({"q": "Marvel", "attributesToSearchOn": ["exact"]}), |response, code| {
@@ -557,7 +557,7 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
let (task, _status_code) =
index.update_settings_searchable_attributes(json!(["details.title"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -595,7 +595,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(
@@ -608,7 +608,7 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.search(

@@ -7,7 +7,7 @@ async fn set_and_reset_distinct_attribute() {
let index = server.unique_index();
let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (response, _) = index.settings().await;
@@ -15,7 +15,7 @@ async fn set_and_reset_distinct_attribute() {
let (task2, _status_code) = index.update_settings(json!({ "distinctAttribute": null })).await;
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
let (response, _) = index.settings().await;
@@ -28,7 +28,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();
let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@@ -36,7 +36,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let (update_task2, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;

@@ -58,7 +58,7 @@ macro_rules! test_setting_routes {
let index = server.unique_index();
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let url = format!("/indexes/{}/settings/{}",
index.uid,
stringify!($setting)
@@ -209,7 +209,7 @@ async fn get_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
@@ -254,7 +254,7 @@ async fn secrets_are_hidden_in_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -285,7 +285,7 @@ async fn secrets_are_hidden_in_settings() {
let settings_update_uid = response.uid();
index.wait_task(settings_update_uid).await.succeeded();
server.wait_task(settings_update_uid).await.succeeded();
let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@@ -384,14 +384,14 @@ async fn test_partial_update() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
let (task, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@@ -406,7 +406,7 @@ async fn error_delete_settings_unexisting_index() {
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@@ -424,12 +424,12 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (update_task,_status_code) = index
.update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@@ -439,7 +439,7 @@ async fn reset_all_settings() {
assert_eq!(response["filterableAttributes"], json!(["age"]));
let (delete_task, _status_code) = index.delete_settings().await;
index.wait_task(delete_task.uid()).await.succeeded();
server.wait_task(delete_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@@ -460,11 +460,11 @@ async fn update_setting_unexisting_index() {
let index = server.unique_index();
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@@ -507,7 +507,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let index = server.unique_index();
let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@@ -515,7 +515,7 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let (task, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
@@ -540,7 +540,7 @@ async fn granular_filterable_attributes() {
{ "attributePatterns": ["default-facet-search"], "features": { "filter": {"equality": true, "comparison": true} } },
] })).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200, "{response}");

@@ -26,11 +26,11 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {
#[actix_rt::test]
async fn add_docs_and_disable() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -38,8 +38,8 @@ async fn add_docs_and_disable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// only 1 document should match
index
@@ -86,8 +86,8 @@ async fn add_docs_and_disable() {
#[actix_rt::test]
async fn disable_and_add_docs() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@@ -95,11 +95,11 @@ async fn disable_and_add_docs() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
// only 1 document should match
index
@@ -145,8 +145,8 @@ async fn disable_and_add_docs() {
#[actix_rt::test]
async fn disable_add_docs_and_enable() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@@ -154,11 +154,11 @@ async fn disable_add_docs_and_enable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -166,8 +166,8 @@ async fn disable_add_docs_and_enable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(2).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
@@ -253,8 +253,8 @@ async fn disable_add_docs_and_enable() {
#[actix_rt::test]
async fn disable_add_docs_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@@ -262,11 +262,11 @@ async fn disable_add_docs_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -274,8 +274,8 @@ async fn disable_add_docs_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(2).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
@@ -361,19 +361,19 @@ async fn disable_add_docs_and_reset() {
#[actix_rt::test]
async fn default_behavior() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
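Besides the `wait_task` substitution, the hunks above move these tests off a dedicated `Server::new().await` onto the shared server with a prefixed unique index, replace `wait_task(2)` (a hard-coded task uid) with the uid returned in the response, and add `.succeeded()` so a failed task now fails the test instead of passing silently. A sketch of why the literal uid cannot survive the move, under the same helper assumptions as the earlier sketch:

```rust
#[actix_rt::test]
async fn waits_on_the_returned_uid() {
    let server = Server::new_shared();
    let index = server.unique_index_with_prefix("test");

    let (response, code) = index
        .update_settings(json!({ "rankingRules": ["words", "typo", "proximity"] }))
        .await;
    assert_eq!("202", code.as_str(), "{response:?}");

    // `index.wait_task(2).await` assumed this was exactly the third task the
    // instance had ever enqueued. With a shared server, other tests enqueue
    // tasks concurrently, so the only safe uid is the one just returned.
    server.wait_task(response.uid()).await.succeeded();
}
```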

@@ -30,7 +30,7 @@ async fn attribute_scale_search() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -39,7 +39,7 @@ async fn attribute_scale_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// the expected order is [1, 3, 2] instead of [3, 1, 2]
// because the attribute scale doesn't make the difference between 1 and 3.
@@ -103,7 +103,7 @@ async fn attribute_scale_phrase_search() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = index
.update_settings(json!({
@@ -111,7 +111,7 @@ async fn attribute_scale_phrase_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// the expected order is [1, 3] instead of [3, 1]
// because the attribute scale doesn't make the difference between 1 and 3.
@@ -171,7 +171,7 @@ async fn word_scale_set_and_reset() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
// Set and reset the setting ensuring the swap between the 2 settings is applied.
let (update_task1, _code) = index
@@ -180,7 +180,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task1.uid()).await.succeeded();
server.wait_task(update_task1.uid()).await.succeeded();
let (update_task2, _code) = index
.update_settings(json!({
@@ -188,7 +188,7 @@ async fn word_scale_set_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(update_task2.uid()).await.succeeded();
server.wait_task(update_task2.uid()).await.succeeded();
// [3, 1, 2]
index
@@ -286,7 +286,7 @@ async fn attribute_scale_default_ranking_rules() {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@@ -294,7 +294,7 @@ async fn attribute_scale_default_ranking_rules() {
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
// the expected order is [3, 1, 2]
index

@@ -15,7 +15,7 @@ async fn set_and_reset() {
"dictionary": ["J.R.R.", "J. R. R."],
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
@@ -45,7 +45,7 @@ async fn set_and_reset() {
}))
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
@@ -74,7 +74,7 @@ async fn set_and_search() {
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();
let (update_task, _code) = index
.update_settings(json!({
@@ -83,7 +83,7 @@ async fn set_and_search() {
"dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
@@ -228,7 +228,7 @@ async fn advanced_synergies() {
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
server.wait_task(add_task.uid()).await.succeeded();
let (update_task, _code) = index
.update_settings(json!({
@@ -243,7 +243,7 @@ async fn advanced_synergies() {
}
}))
.await;
index.wait_task(update_task.uid()).await.succeeded();
server.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
@@ -353,7 +353,7 @@ async fn advanced_synergies() {
"dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
}))
.await;
index.wait_task(_response.uid()).await.succeeded();
server.wait_task(_response.uid()).await.succeeded();
index
.search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {

@@ -9,15 +9,15 @@ edition.workspace = true
license.workspace = true
[dependencies]
anyhow = "1.0.95"
clap = { version = "4.5.24", features = ["derive"] }
dump = { path = "../dump" }
file-store = { path = "../file-store" }
indexmap = { version = "2.7.0", features = ["serde"] }
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
tempfile = "3.15.0"
time = { version = "0.3.37", features = ["formatting", "parsing", "alloc"] }
uuid = { version = "1.11.0", features = ["v4"], default-features = false }
anyhow.workspace = true
clap = { workspace = true, features = ["derive"] }
dump.path = "../dump"
file-store.path = "../file-store"
indexmap = { workspace = true, features = ["serde"] }
meilisearch-auth.path = "../meilisearch-auth"
meilisearch-types.path = "../meilisearch-types"
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["preserve_order"] }
tempfile.workspace = true
time = { workspace = true, features = ["formatting", "parsing", "alloc"] }
uuid = { workspace = true, features = ["v4"] }

@@ -12,114 +12,110 @@ readme.workspace = true
license.workspace = true
[dependencies]
big_s = "1.0.2"
bimap = { version = "0.6.3", features = ["serde"] }
bincode = "1.3.3"
bstr = "1.11.3"
bytemuck = { version = "1.21.0", features = ["extern_crate_alloc"] }
byteorder = "1.5.0"
charabia = { version = "0.9.6", default-features = false }
concat-arrays = "0.1.2"
convert_case = "0.6.0"
crossbeam-channel = "0.5.15"
deserr = "0.6.3"
either = { version = "1.13.0", features = ["serde"] }
flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7"
fxhash = "0.2.1"
geoutils = "0.5.1"
grenad = { version = "0.5.0", default-features = false, features = [
big_s.workspace = true
bimap = { workspace = true, features = ["serde"] }
bincode.workspace = true
bstr.workspace = true
bytemuck = { workspace = true, features = ["extern_crate_alloc"] }
byteorder.workspace = true
charabia = { workspace = true, default-features = false }
concat-arrays.workspace = true
convert_case.workspace = true
crossbeam-channel.workspace = true
deserr.workspace = true
either = { workspace = true, features = ["serde"] }
flatten-serde-json.path = "../flatten-serde-json"
fst.workspace = true
fxhash.workspace = true
geoutils.workspace = true
grenad = { workspace = true, default-features = false, features = [
"rayon",
"tempfile",
] }
heed = { version = "0.22.0", default-features = false, features = [
heed = { workspace = true, default-features = false, features = [
"serde-json",
"serde-bincode",
] }
indexmap = { version = "2.7.0", features = ["serde"] }
json-depth-checker = { path = "../json-depth-checker" }
levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
memchr = "2.7.4"
memmap2 = "0.9.5"
obkv = "0.3.0"
once_cell = "1.20.2"
ordered-float = "4.6.0"
rayon = "1.10.0"
roaring = { version = "0.10.10", features = ["serde"] }
rstar = { version = "0.12.2", features = ["serde"] }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order", "raw_value"] }
slice-group-by = "0.3.1"
smallstr = { version = "0.3.0", features = ["serde"] }
smallvec = "1.13.2"
smartstring = "1.0.1"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
indexmap = { workspace = true, features = ["serde"] }
json-depth-checker.path = "../json-depth-checker"
levenshtein_automata = { workspace = true, features = ["fst_automaton"] }
memchr.workspace = true
memmap2.workspace = true
obkv.workspace = true
once_cell.workspace = true
ordered-float.workspace = true
rayon.workspace = true
roaring = { workspace = true, features = ["serde"] }
rstar = { workspace = true, features = ["serde"] }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["preserve_order", "raw_value"] }
slice-group-by.workspace = true
smallstr = { workspace = true, features = ["serde"] }
smallvec.workspace = true
smartstring.workspace = true
tempfile.workspace = true
thiserror.workspace = true
time = { workspace = true, features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
uuid = { version = "1.11.0", features = ["v4"] }
uuid = { workspace = true, features = ["v4"] }
filter-parser = { path = "../filter-parser" }
filter-parser.path = "../filter-parser"
# documents words self-join
itertools = "0.14.0"
itertools.workspace = true
csv = "1.3.1"
candle-core = { version = "0.8.2" }
candle-transformers = { version = "0.8.2" }
candle-nn = { version = "0.8.2" }
tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false, features = [
csv.workspace = true
candle-core = { workspace = true }
candle-transformers = { workspace = true }
candle-nn = { workspace = true }
tokenizers = { workspace = true, default-features = false, features = [
"onig",
] }
hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false, features = [
hf-hub = { workspace = true, default-features = false, features = [
"online",
] }
tiktoken-rs = "0.6.0"
liquid = "0.26.9"
rhai = { git = "https://github.com/rhaiscript/rhai", rev = "ef3df63121d27aacd838f366f2b83fd65f20a1e4", features = [
tiktoken-rs.workspace = true
liquid.workspace = true
rhai = { workspace = true, features = [
"serde",
"no_module",
"no_custom_syntax",
"no_time",
"sync",
] }
arroy = "0.6.1"
rand = "0.8.5"
tracing = "0.1.41"
ureq = { version = "2.12.1", features = ["json"] }
url = "2.5.4"
rayon-par-bridge = "0.1.0"
hashbrown = "0.15.2"
bumpalo = "3.16.0"
bumparaw-collections = "0.1.4"
thread_local = "1.1.8"
allocator-api2 = "0.2.21"
rustc-hash = "2.1.0"
uell = "0.1.0"
enum-iterator = "2.1.0"
bbqueue = { git = "https://github.com/meilisearch/bbqueue" }
flume = { version = "0.11.1", default-features = false }
utoipa = { version = "5.3.1", features = [
arroy.workspace = true
rand.workspace = true
tracing.workspace = true
ureq = { workspace = true, features = ["json"] }
url.workspace = true
hashbrown.workspace = true
bumpalo.workspace = true
bumparaw-collections.workspace = true
thread_local.workspace = true
rustc-hash.workspace = true
enum-iterator.workspace = true
bbqueue.workspace = true
flume.workspace = true
utoipa = { workspace = true, features = [
"non_strict_integers",
"preserve_order",
"uuid",
"time",
"openapi_extensions",
] }
lru = "0.13.0"
lru.workspace = true
[dev-dependencies]
mimalloc = { version = "0.1.43", default-features = false }
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
maplit = "1.0.2"
md5 = "0.7.0"
meili-snap = { path = "../meili-snap" }
rand = { version = "0.8.5", features = ["small_rng"] }
mimalloc.workspace = true
insta.workspace = true
maplit.workspace = true
md5.workspace = true
meili-snap.path = "../meili-snap"
rand = { workspace = true, features = ["small_rng"] }
[features]
all-tokenizations = ["charabia/default"]

@@ -13,7 +13,7 @@ edition.workspace = true
license.workspace = true
[dependencies]
serde_json = "1.0"
serde_json.workspace = true
[dev-dependencies]
big_s = "1.0"
big_s.workspace = true

@@ -6,19 +6,15 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
color-spantrace = "0.2.1"
fxprof-processed-profile = "0.7.0"
serde = { version = "1.0.217", features = ["derive"] }
serde_json = "1.0.135"
tracing = "0.1.41"
tracing-error = "0.2.1"
tracing-subscriber = "0.3.19"
byte-unit = { version = "5.1.6", default-features = false, features = [
"std",
"byte",
"serde",
] }
tokio = { version = "1.43.1", features = ["sync"] }
color-spantrace.workspace = true
fxprof-processed-profile.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
tracing.workspace = true
tracing-error.workspace = true
tracing-subscriber.workspace = true
byte-unit = { workspace = true, features = ["serde"] }
tokio = { workspace = true, features = ["sync"] }
[target.'cfg(any(target_os = "linux", target_os = "macos"))'.dependencies]
libproc = "0.14.10"
libproc.workspace = true

@@ -11,34 +11,33 @@ license.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.95"
build-info = { version = "1.7.0", path = "../build-info" }
cargo_metadata = "0.19.1"
clap = { version = "4.5.24", features = ["derive"] }
futures-core = "0.3.31"
futures-util = "0.3.31"
reqwest = { version = "0.12.12", features = [
anyhow.workspace = true
build-info.path = "../build-info"
cargo_metadata.workspace = true
clap = { workspace = true, features = ["derive"] }
futures-util.workspace = true
reqwest = { workspace = true, features = [
"stream",
"json",
"rustls-tls",
], default-features = false }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = "1.0.135"
sha2 = "0.10.8"
sysinfo = "0.33.1"
time = { version = "0.3.37", features = [
serde = { workspace = true, features = ["derive"] }
serde_json.workspace = true
sha2.workspace = true
sysinfo.workspace = true
time = { workspace = true, features = [
"serde",
"serde-human-readable",
"macros",
] }
tokio = { version = "1.43.1", features = [
tokio = { workspace = true, features = [
"rt",
"net",
"time",
"process",
"signal",
] }
tracing = "0.1.41"
tracing-subscriber = "0.3.19"
tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
uuid = { version = "1.11.0", features = ["v7", "serde"] }
tracing.workspace = true
tracing-subscriber.workspace = true
tracing-trace.path = "../tracing-trace"
uuid = { workspace = true, features = ["v7", "serde"] }