Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-09 22:25:44 +00:00)

Compare commits: 281 commits, from prototype-… to prototype-…
Commits (SHA1):

3852563e79, 796f9fdf5b, 43c4a229b7, c17031d3de, fc6cc80705, 138d20b277, 7c1a9113f9, 07ae297ffd, 4069dbcfca, 03eb50fbac,
2616d776f2, 3004db95af, 9a729bf31d, 8bfa6a7f54, 056f18bd02, fe9866aca8, 60f105a4a3, abb399b802, aeaac7270e, f45770a3ce,
0e10ff1aa3, 6ee608c2d1, 95e8a9bef1, 0598320252, 2269104337, 6b4d69996c, df4e3c2e43, e2b549c5ee, 8390006ebf, 7200437246,
68e7bfb37f, 209c4bfc18, 396d76046d, 9ae73e3c05, 933e319364, 596617dd31, f3dd6834c6, e8774ad079, 5d191c479e, c3368e6859,
40776ed4cd, 9bda9a9a64, aefebdeb8b, 646e44ddf9, 9275ce1503, 48d2d3a5cd, 7ec0c9aa83, 484fdd9ce2, 7533a11143, 19d077a4b1,
b8845d1015, 620867d611, 77cc3678b5, a73d3c03e9, 824f5b12ce, bb4baf7fae, 0263eb0aec, 8a916a4e42, 506ee40dc5, 952fabf8a0,
7ea2e4ec7b, a0a4ac66ec, b037e416d3, e9d547556d, ab0eba2f72, 5ceb3c6a10, 34d572e3e5, 28e6adc435, 6a683975bf, c60d11fb42,
32207f9f19, 7c1b15fd06, 4352a924d7, bbe802c656, b32e30ad27, ae115cee78, 1824fbd1b5, 34d8a54c4b, 8fa6e8670a, c640856cc1,
1a1317ab0f, 9cab754942, 4a0ec15ad2, 985b892b7a, 605dea4f85, 95d4775d4a, 416fcf47f1, 6433e49882, 85939ae8ad, e654eddf56,
170ad87e44, bc56087a17, 29d82ade56, a7f5d3bb7a, 48e8356a16, 1fda05c2fd, 8f96724adf, 01e5b0effa, 2ec9664878, 7f5a0c0013,
f5c3dad3ed, 10028515ac, 63ccd19ab1, 1b4d344e18, 89c0cf9b12, 3770e70581, e497008161, a15ebb283f, 3f256a7959, b41af0d0f6,
3ebff65ef3, 62e2a5a324, 90d96ee415, 38b317857d, 765e76857f, 204cf423b2, e575b5af74, 4fc24cb691, 8bc8484e95, 7b49c30d8c,
239851046d, 60796dfb14, c7cb72a77a, 4d819ea636, 666680bd87, 27527849bb, 1d02efeab9, 53fc98d3b0, 263300b3a3, ab3d92d163,
ef9fc6c854, 61b0f50d4d, 0557a4dd2f, 930d5a09a8, 8b0c4291ae, c9efdf8c88, 72736c0ea9, 49317bbee4, af54c8381e, 693fcd5752,
733175359a, 7c6162f0bf, d6ae39bf0f, e416bbc1de, 2cfd363dc6, 70aa78a2c2, 96c81762ed, 0b1f634afa, d3d5015854, f95f29c492,
a50b69b868, 3668f5f021, 54fdf379bb, 41b1cd5a73, 5c14a25d5a, fda2843135, 9347330f3a, 56c9190dab, 6b986dceaf, ea6bb4df1d,
a3d2f64725, d5526cffff, 5cb75d1f2a, 921e3c4ffe, 52591761af, f80182f0a9, 3b30b6a57a, 5efc78db55, cffbe3fcb6, 8d8fcb9846,
20049669c9, db28d13cb1, 5a7cfc57fd, 790621dc29, 1d577ae98b, 88e9a55d44, dbe551cf99, a299fbd33b, 193119acb9, 4c71118699,
5fe2943d3c, 86ff502327, 6b1a345dce, b54ece690b, 3ea167bade, 1158d6689f, d9b0463a0b, ae9899f179, 308fd7128e, 27e7c00622,
58207da934, fb8b832192, 17207b5405, bd95503eba, 8b8b0d802c, d329e86250, d416b3b390, 54f5e74744, fd4b192a39, 3c13feebf7,
1811168b96, b06cc1e0a2, 44f812c36d, c8e77b5f25, 283f516e15, b4ca0a8c98, b658e38acd, f87e46cc16, 65354b414a, 025df397c0,
f77abc9dc8, 7e9909ee45, 43ec97fe45, 02929e241b, c13efde042, 36f0a1492c, ce65ad213b, 3e0de6cb83, f3d691667d, ce9c930d10,
fc88b003b4, cf5d26124a, 38b1c57fa8, 25c525b057, 83cd28b60b, 48cad4132a, 4897ad99d0, 46ff78b4ec, 9ad43b6841, c9ec502ed9,
18aed75d3b, 6738a4f6ee, d2948adea3, f54b57e5be, 95821d0bde, f690fa0686, 24e94b28c1, 34d58f35c8, 1d5265caf4, 97aeb6db4d,
f888f87635, 8c8d98eeaa, c5ae43cac6, 57eecd6197, 2fe5c78cb6, 8047cfe438, 5717e5c1af, bb07038c31, d1a088ea0b, b68e22c0e6,
03a36f116e, 8a0bf24ed5, e2763471e5, b2f2c5d69f, 1594c54e23, 13b607bd68, 3d130d31c8, 4cda584b0c, 248c90bad5, 0e9040e605,
3e3c00f44c, d986a3bbaf, c2ceb8e41b, 79db2e67fb, 865f24cfef, 3fbe1df770, 150d1db86b, 806e983aa5, e96c1d4b0f, 15cdc6924b,
677e8b122c, 75a7e40a27, c8939944c6, 4e6252fb03, 8bd8e744f3, 53f32a7dd7, 47a7ed93d3, 2ac826edca, 89aff2081c, 3b773b3416,
648b2876f6
.github/workflows/db-change-missing.yml (vendored): 10 lines changed
```diff
@@ -4,22 +4,22 @@ on:
   pull_request:
     types: [opened, synchronize, reopened, labeled, unlabeled]
 
 env:
   GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
 
 jobs:
   check-labels:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Check db change labels
         id: check_labels
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
           echo ${{ github.event.pull_request.number }}
           echo $URL
-          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
+          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
           echo "Labels: $LABELS"
           if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
             echo "::error::Pull request must contain either the 'db change' or 'no db change' label."
             exit 1
```
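A rough way to dry-run this label gate outside CI, assuming an authenticated `gh` CLI; the PR number `123` is a placeholder introduced for illustration:

```sh
#!/usr/bin/env bash
# Hypothetical local replay of the workflow's label check; PR=123 is a placeholder.
PR=123
LABELS=$(gh api -H "Accept: application/vnd.github+json" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  "/repos/meilisearch/meilisearch/issues/$PR/labels" -q '.[].name')
echo "Labels: $LABELS"
if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
  echo "Pull request must carry either the 'db change' or 'no db change' label." >&2
  exit 1
fi
```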
````diff
@@ -57,9 +57,17 @@ This command will be triggered to each PR as a requirement for merging it.
 
 You can set the `LINDERA_CACHE` environment variable to speed up your successive builds by up to 2 minutes.
 It'll store some built artifacts in the directory of your choice.
 
-We recommend using the standard `$HOME/.cache/lindera` directory:
+We recommend using the `$HOME/.cache/meili/lindera` directory:
 ```sh
-export LINDERA_CACHE=$HOME/.cache/lindera
+export LINDERA_CACHE=$HOME/.cache/meili/lindera
 ```
 
+You can set the `MILLI_BENCH_DATASETS_PATH` environment variable to further speed up your builds.
+It'll store some big files used for the benchmarks in the directory of your choice.
+
+We recommend using the `$HOME/.cache/meili/benches` directory:
+```sh
+export MILLI_BENCH_DATASETS_PATH=$HOME/.cache/meili/benches
+```
+
 Furthermore, you can improve incremental compilation by setting the `MEILI_NO_VERGEN` environment variable.
````
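Taken together, the variables from this excerpt could live in a shell profile. A possible setup, with the caveat that `MEILI_NO_VERGEN=1` is an assumed value (the text only says the variable must be set):

```sh
# Assumed ~/.profile snippet combining the caches documented above.
export LINDERA_CACHE=$HOME/.cache/meili/lindera
export MILLI_BENCH_DATASETS_PATH=$HOME/.cache/meili/benches
export MEILI_NO_VERGEN=1  # assumed: any non-empty value should do
```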
Cargo.lock (generated): 687 lines changed. File diff suppressed because it is too large.
```diff
@@ -22,7 +22,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.15.0"
+version = "1.15.2"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
     "Clément Renault <clement@meilisearch.com>",
```
```diff
@@ -11,27 +11,27 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.95"
-bumpalo = "3.16.0"
+anyhow = "1.0.98"
+bumpalo = "3.18.1"
 csv = "1.3.1"
 memmap2 = "0.9.5"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.43", default-features = false }
-serde_json = { version = "1.0.135", features = ["preserve_order"] }
-tempfile = "3.15.0"
+mimalloc = { version = "0.1.47", default-features = false }
+serde_json = { version = "1.0.140", features = ["preserve_order"] }
+tempfile = "3.20.0"
 
 [dev-dependencies]
-criterion = { version = "0.5.1", features = ["html_reports"] }
+criterion = { version = "0.6.0", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
-roaring = "0.10.10"
+roaring = "0.10.12"
 
 [build-dependencies]
-anyhow = "1.0.95"
-bytes = "1.9.0"
-convert_case = "0.6.0"
-flate2 = "1.0.35"
-reqwest = { version = "0.12.15", features = ["blocking", "rustls-tls"], default-features = false }
+anyhow = "1.0.98"
+bytes = "1.10.1"
+convert_case = "0.8.0"
+flate2 = "1.1.2"
+reqwest = { version = "0.12.20", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]
```
```diff
@@ -67,7 +67,7 @@ fn main() -> anyhow::Result<()> {
         writeln!(
             &mut manifest_paths_file,
             r#"pub const {}: &str = {:?};"#,
-            dataset.to_case(Case::ScreamingSnake),
+            dataset.to_case(Case::UpperSnake),
             out_file.display(),
         )?;
```
```diff
@@ -11,8 +11,8 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-time = { version = "0.3.37", features = ["parsing"] }
+time = { version = "0.3.41", features = ["parsing"] }
 
 [build-dependencies]
-anyhow = "1.0.95"
-vergen-git2 = "1.0.2"
+anyhow = "1.0.98"
+vergen-git2 = "1.0.7"
```
```diff
@@ -11,21 +11,21 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.95"
-flate2 = "1.0.35"
-http = "1.2.0"
+anyhow = "1.0.98"
+flate2 = "1.1.2"
+http = "1.3.1"
 meilisearch-types = { path = "../meilisearch-types" }
-once_cell = "1.20.2"
+once_cell = "1.21.3"
 regex = "1.11.1"
-roaring = { version = "0.10.10", features = ["serde"] }
-serde = { version = "1.0.217", features = ["derive"] }
-serde_json = { version = "1.0.135", features = ["preserve_order"] }
-tar = "0.4.43"
-tempfile = "3.15.0"
-thiserror = "2.0.9"
-time = { version = "0.3.37", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+roaring = { version = "0.10.12", features = ["serde"] }
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = { version = "1.0.140", features = ["preserve_order"] }
+tar = "0.4.44"
+tempfile = "3.20.0"
+thiserror = "2.0.12"
+time = { version = "0.3.41", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tracing = "0.1.41"
-uuid = { version = "1.11.0", features = ["serde", "v4"] }
+uuid = { version = "1.17.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
```
```diff
@@ -11,7 +11,7 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-tempfile = "3.15.0"
-thiserror = "2.0.9"
+tempfile = "3.20.0"
+thiserror = "2.0.12"
 tracing = "0.1.41"
-uuid = { version = "1.11.0", features = ["serde", "v4"] }
+uuid = { version = "1.17.0", features = ["serde", "v4"] }
```
```diff
@@ -14,7 +14,7 @@ license.workspace = true
 [dependencies]
 nom = "7.1.3"
 nom_locate = "4.2.0"
-unescaper = "0.1.5"
+unescaper = "0.1.6"
 
 [dev-dependencies]
 # fixed version due to format breakages in v1.40
```
```diff
@@ -16,7 +16,7 @@ license.workspace = true
 serde_json = "1.0"
 
 [dev-dependencies]
-criterion = { version = "0.5.1", features = ["html_reports"] }
+criterion = { version = "0.6.0", features = ["html_reports"] }
 
 [[bench]]
 name = "benchmarks"
```
```diff
@@ -12,11 +12,11 @@ license.workspace = true
 
 [dependencies]
 arbitrary = { version = "1.4.1", features = ["derive"] }
-bumpalo = "3.16.0"
-clap = { version = "4.5.24", features = ["derive"] }
-either = "1.13.0"
+bumpalo = "3.18.1"
+clap = { version = "4.5.40", features = ["derive"] }
+either = "1.15.0"
 fastrand = "2.3.0"
 milli = { path = "../milli" }
-serde = { version = "1.0.217", features = ["derive"] }
-serde_json = { version = "1.0.135", features = ["preserve_order"] }
-tempfile = "3.15.0"
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = { version = "1.0.140", features = ["preserve_order"] }
+tempfile = "3.20.0"
```
```diff
@@ -11,31 +11,31 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.95"
+anyhow = "1.0.98"
 bincode = "1.3.3"
 byte-unit = "5.1.6"
-bumpalo = "3.16.0"
+bumpalo = "3.18.1"
 bumparaw-collections = "0.1.4"
-convert_case = "0.6.0"
+convert_case = "0.8.0"
 csv = "1.3.1"
 derive_builder = "0.20.2"
 dump = { path = "../dump" }
 enum-iterator = "2.1.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.35"
-indexmap = "2.7.0"
+flate2 = "1.1.2"
+indexmap = "2.9.0"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 memmap2 = "0.9.5"
 page_size = "0.6.0"
 rayon = "1.10.0"
-roaring = { version = "0.10.10", features = ["serde"] }
-serde = { version = "1.0.217", features = ["derive"] }
-serde_json = { version = "1.0.138", features = ["preserve_order"] }
+roaring = { version = "0.10.12", features = ["serde"] }
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = { version = "1.0.140", features = ["preserve_order"] }
 synchronoise = "1.0.1"
-tempfile = "3.15.0"
-thiserror = "2.0.9"
-time = { version = "0.3.37", features = [
+tempfile = "3.20.0"
+thiserror = "2.0.12"
+time = { version = "0.3.41", features = [
     "serde-well-known",
     "formatting",
     "parsing",
@@ -43,7 +43,7 @@ time = { version = "0.3.37", features = [
 ] }
 tracing = "0.1.41"
 ureq = "2.12.1"
-uuid = { version = "1.11.0", features = ["serde", "v4"] }
+uuid = { version = "1.17.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
```
```diff
@@ -55,8 +55,8 @@ use meilisearch_types::features::{
     ChatCompletionSettings, InstanceTogglableFeatures, Network, RuntimeTogglableFeatures,
 };
 use meilisearch_types::heed::byteorder::BE;
-use meilisearch_types::heed::types::{SerdeJson, Str, I128};
-use meilisearch_types::heed::{self, Database, Env, RoTxn, RwTxn, WithoutTls};
+use meilisearch_types::heed::types::{DecodeIgnore, SerdeJson, Str, I128};
+use meilisearch_types::heed::{self, Database, Env, RoTxn, WithoutTls};
 use meilisearch_types::milli::index::IndexEmbeddingConfig;
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::vector::{Embedder, EmbedderOptions, EmbeddingConfigs};
@@ -77,6 +77,7 @@ use crate::utils::clamp_to_page_size;
 pub(crate) type BEI128 = I128<BE>;
 
 const TASK_SCHEDULER_SIZE_THRESHOLD_PERCENT_INT: u64 = 40;
+const CHAT_SETTINGS_DB_NAME: &str = "chat-settings";
 
 #[derive(Debug)]
 pub struct IndexSchedulerOptions {
@@ -279,7 +280,7 @@ impl IndexScheduler {
         let features = features::FeatureData::new(&env, &mut wtxn, options.instance_features)?;
         let queue = Queue::new(&env, &mut wtxn, &options)?;
         let index_mapper = IndexMapper::new(&env, &mut wtxn, &options, budget)?;
-        let chat_settings = env.create_database(&mut wtxn, Some("chat-settings"))?;
+        let chat_settings = env.create_database(&mut wtxn, Some(CHAT_SETTINGS_DB_NAME))?;
         wtxn.commit()?;
 
         // allow unreachable_code to get rids of the warning in the case of a test build.
@@ -310,11 +311,7 @@ impl IndexScheduler {
         Ok(this)
     }
 
-    pub fn write_txn(&self) -> Result<RwTxn> {
-        self.env.write_txn().map_err(|e| e.into())
-    }
-
-    pub fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
+    fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
         self.env.read_txn().map_err(|e| e.into())
     }
 
@@ -556,7 +553,6 @@ impl IndexScheduler {
     /// And a `Vec` of the workspace_uids
     pub fn paginated_chat_workspace_uids(
         &self,
-        _filters: &meilisearch_auth::AuthFilter,
         from: usize,
         limit: usize,
     ) -> Result<(usize, Vec<String>)> {
@@ -900,21 +896,29 @@ impl IndexScheduler {
         res.map(EmbeddingConfigs::new)
     }
 
-    pub fn chat_settings(&self, rtxn: &RoTxn, uid: &str) -> Result<Option<ChatCompletionSettings>> {
-        self.chat_settings.get(rtxn, uid).map_err(Into::into)
+    pub fn chat_settings(&self, uid: &str) -> Result<Option<ChatCompletionSettings>> {
+        let rtxn = self.env.read_txn()?;
+        self.chat_settings.get(&rtxn, uid).map_err(Into::into)
     }
 
-    pub fn put_chat_settings(
-        &self,
-        wtxn: &mut RwTxn,
-        uid: &str,
-        settings: &ChatCompletionSettings,
-    ) -> Result<()> {
-        self.chat_settings.put(wtxn, uid, settings).map_err(Into::into)
+    /// Return true if chat workspace exists.
+    pub fn chat_workspace_exists(&self, name: &str) -> Result<bool> {
+        let rtxn = self.env.read_txn()?;
+        Ok(self.chat_settings.remap_data_type::<DecodeIgnore>().get(&rtxn, name)?.is_some())
     }
 
-    pub fn delete_chat_settings(&self, wtxn: &mut RwTxn, uid: &str) -> Result<bool> {
-        self.chat_settings.delete(wtxn, uid).map_err(Into::into)
+    pub fn put_chat_settings(&self, uid: &str, settings: &ChatCompletionSettings) -> Result<()> {
+        let mut wtxn = self.env.write_txn()?;
+        self.chat_settings.put(&mut wtxn, uid, settings)?;
+        wtxn.commit()?;
+        Ok(())
+    }
+
+    pub fn delete_chat_settings(&self, uid: &str) -> Result<bool> {
+        let mut wtxn = self.env.write_txn()?;
+        let deleted = self.chat_settings.delete(&mut wtxn, uid)?;
+        wtxn.commit()?;
+        Ok(deleted)
+    }
 }
```
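The refactor above moves transaction management inside the scheduler: callers previously opened an `RwTxn`/`RoTxn` and passed it in, while the new methods open, use, and commit their own transaction. A minimal stand-in sketch of that API shape (a mutex-guarded map instead of the real heed/LMDB environment; none of these types are meilisearch's):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

// Mutex-guarded map standing in for the heed environment plus database.
struct ChatSettingsStore {
    db: Mutex<HashMap<String, String>>,
}

impl ChatSettingsStore {
    // Each method owns its "transaction" (here, the lock) for its whole body,
    // mirroring how the new scheduler methods open and commit internally.
    fn put_chat_settings(&self, uid: &str, settings: &str) {
        self.db.lock().unwrap().insert(uid.to_owned(), settings.to_owned());
    }

    fn chat_settings(&self, uid: &str) -> Option<String> {
        self.db.lock().unwrap().get(uid).cloned()
    }

    fn chat_workspace_exists(&self, uid: &str) -> bool {
        self.db.lock().unwrap().contains_key(uid)
    }

    fn delete_chat_settings(&self, uid: &str) -> bool {
        self.db.lock().unwrap().remove(uid).is_some()
    }
}

fn main() {
    let store = ChatSettingsStore { db: Mutex::new(HashMap::new()) };
    store.put_chat_settings("default", r#"{"source":"openAi"}"#);
    assert!(store.chat_workspace_exists("default"));
    assert_eq!(store.chat_settings("default").as_deref(), Some(r#"{"source":"openAi"}"#));
    assert!(store.delete_chat_settings("default"));
}
```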
```diff
@@ -237,7 +237,7 @@ impl IndexScheduler {
                 #[cfg(test)]
                 self.breakpoint(crate::test_utils::Breakpoint::ProcessBatchSucceeded);
 
-                let (task_progress, task_progress_obj) = AtomicTaskStep::new(tasks.len() as u32);
+                let (task_progress, task_progress_obj) = AtomicTaskStep::new(tasks.len() as u64);
                 progress.update_progress(task_progress_obj);
                 process_batch_info = info;
                 let mut success = 0;
@@ -316,7 +316,7 @@ impl IndexScheduler {
             Err(err) => {
                 #[cfg(test)]
                 self.breakpoint(crate::test_utils::Breakpoint::ProcessBatchFailed);
-                let (task_progress, task_progress_obj) = AtomicTaskStep::new(ids.len() as u32);
+                let (task_progress, task_progress_obj) = AtomicTaskStep::new(ids.len() as u64);
                 progress.update_progress(task_progress_obj);
 
                 if matches!(err, Error::DatabaseUpgrade(_)) {
@@ -346,8 +346,8 @@ impl IndexScheduler {
         for (step, swap) in swaps.iter().enumerate() {
             progress.update_progress(VariableNameStep::<SwappingTheIndexes>::new(
                 format!("swapping index {} and {}", swap.indexes.0, swap.indexes.1),
-                step as u32,
-                swaps.len() as u32,
+                step as u64,
+                swaps.len() as u64,
             ));
             self.apply_index_swap(
                 &mut wtxn,
@@ -425,7 +425,7 @@ impl IndexScheduler {
         // 3. before_name -> new_name in the task's KindWithContent
         progress.update_progress(InnerSwappingTwoIndexes::UpdateTheTasks);
         let tasks_to_update = &index_lhs_task_ids | &index_rhs_task_ids;
-        let (atomic, task_progress) = AtomicTaskStep::new(tasks_to_update.len() as u32);
+        let (atomic, task_progress) = AtomicTaskStep::new(tasks_to_update.len() as u64);
         progress.update_progress(task_progress);
 
         for task_id in tasks_to_update {
@@ -482,7 +482,7 @@ impl IndexScheduler {
         // The tasks that have been removed *per batches*.
         let mut affected_batches: HashMap<BatchId, RoaringBitmap> = HashMap::new();
 
-        let (atomic_progress, task_progress) = AtomicTaskStep::new(to_delete_tasks.len() as u32);
+        let (atomic_progress, task_progress) = AtomicTaskStep::new(to_delete_tasks.len() as u64);
         progress.update_progress(task_progress);
         for task_id in to_delete_tasks.iter() {
             let task =
@@ -528,7 +528,7 @@ impl IndexScheduler {
 
         progress.update_progress(TaskDeletionProgress::DeletingTasksMetadata);
         let (atomic_progress, task_progress) = AtomicTaskStep::new(
-            (affected_indexes.len() + affected_statuses.len() + affected_kinds.len()) as u32,
+            (affected_indexes.len() + affected_statuses.len() + affected_kinds.len()) as u64,
         );
         progress.update_progress(task_progress);
         for index in affected_indexes.iter() {
@@ -547,7 +547,7 @@ impl IndexScheduler {
         }
 
         progress.update_progress(TaskDeletionProgress::DeletingTasks);
-        let (atomic_progress, task_progress) = AtomicTaskStep::new(to_delete_tasks.len() as u32);
+        let (atomic_progress, task_progress) = AtomicTaskStep::new(to_delete_tasks.len() as u64);
         progress.update_progress(task_progress);
         for task in to_delete_tasks.iter() {
             self.queue.tasks.all_tasks.delete(wtxn, &task)?;
@@ -564,7 +564,7 @@ impl IndexScheduler {
             }
         }
         progress.update_progress(TaskDeletionProgress::DeletingBatches);
-        let (atomic_progress, batch_progress) = AtomicBatchStep::new(affected_batches.len() as u32);
+        let (atomic_progress, batch_progress) = AtomicBatchStep::new(affected_batches.len() as u64);
         progress.update_progress(batch_progress);
         for (batch_id, to_delete_tasks) in affected_batches {
             if let Some(mut tasks) = self.queue.batch_to_tasks_mapping.get(wtxn, &batch_id)? {
@@ -737,7 +737,7 @@ impl IndexScheduler {
         }
 
         // 3. We now have a list of tasks to cancel, cancel them
-        let (task_progress, progress_obj) = AtomicTaskStep::new(tasks_to_cancel.len() as u32);
+        let (task_progress, progress_obj) = AtomicTaskStep::new(tasks_to_cancel.len() as u64);
         progress.update_progress(progress_obj);
 
         let mut tasks = self.queue.tasks.get_existing_tasks(
@@ -748,7 +748,7 @@ impl IndexScheduler {
         )?;
 
         progress.update_progress(TaskCancelationProgress::UpdatingTasks);
-        let (task_progress, progress_obj) = AtomicTaskStep::new(tasks_to_cancel.len() as u32);
+        let (task_progress, progress_obj) = AtomicTaskStep::new(tasks_to_cancel.len() as u64);
         progress.update_progress(progress_obj);
         for task in tasks.iter_mut() {
             task.status = Status::Canceled;
@@ -48,7 +48,7 @@ impl IndexScheduler {
         let mut dump_tasks = dump.create_tasks_queue()?;
 
         let (atomic, update_task_progress) =
-            AtomicTaskStep::new(self.queue.tasks.all_tasks.len(&rtxn)? as u32);
+            AtomicTaskStep::new(self.queue.tasks.all_tasks.len(&rtxn)? as u64);
         progress.update_progress(update_task_progress);
 
         for ret in self.queue.tasks.all_tasks.iter(&rtxn)? {
@@ -110,7 +110,7 @@ impl IndexScheduler {
         let mut dump_batches = dump.create_batches_queue()?;
 
         let (atomic_batch_progress, update_batch_progress) =
-            AtomicBatchStep::new(self.queue.batches.all_batches.len(&rtxn)? as u32);
+            AtomicBatchStep::new(self.queue.batches.all_batches.len(&rtxn)? as u64);
         progress.update_progress(update_batch_progress);
 
         for ret in self.queue.batches.all_batches.iter(&rtxn)? {
@@ -140,7 +140,7 @@ impl IndexScheduler {
 
         // 4. Dump the indexes
         progress.update_progress(DumpCreationProgress::DumpTheIndexes);
-        let nb_indexes = self.index_mapper.index_mapping.len(&rtxn)? as u32;
+        let nb_indexes = self.index_mapper.index_mapping.len(&rtxn)? as u64;
         let mut count = 0;
         let () = self.index_mapper.try_for_each_index(&rtxn, |uid, index| -> Result<()> {
             progress.update_progress(VariableNameStep::<DumpCreationProgress>::new(
@@ -172,7 +172,7 @@ impl IndexScheduler {
             let nb_documents = index
                 .number_of_documents(&rtxn)
                 .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?
-                as u32;
+                as u64;
             let (atomic, update_document_progress) = AtomicDocumentStep::new(nb_documents);
             progress.update_progress(update_document_progress);
             let documents = index
```
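Every progress counter in these hunks widens from `u32` to `u64`, so task, batch, and document counts beyond `u32::MAX` cannot wrap. The shape of such a step counter, sketched with std atomics (`AtomicTaskStep` itself is internal to the scheduler and not reproduced here):

```rust
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;

// A (done, total) pair like the (step, progress-object) tuples above,
// now 64-bit end to end.
fn new_step(total: u64) -> (Arc<AtomicU64>, u64) {
    (Arc::new(AtomicU64::new(0)), total)
}

fn main() {
    let total: u64 = u32::MAX as u64 + 1; // representable only as u64
    let (done, total) = new_step(total);
    done.fetch_add(1, Ordering::Relaxed);
    println!("{}/{}", done.load(Ordering::Relaxed), total);
}
```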
```diff
@@ -5,7 +5,7 @@ use meilisearch_types::milli::documents::PrimaryKey;
 use meilisearch_types::milli::progress::Progress;
 use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
 use meilisearch_types::milli::update::DocumentAdditionResult;
-use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder};
+use meilisearch_types::milli::{self, ChannelCongestion, Filter};
 use meilisearch_types::settings::apply_settings_to_builder;
 use meilisearch_types::tasks::{Details, KindWithContent, Status, Task};
 use meilisearch_types::Index;
@@ -113,18 +113,8 @@ impl IndexScheduler {
             }
         }
 
-        let local_pool;
         let indexer_config = self.index_mapper.indexer_config();
-        let pool = match &indexer_config.thread_pool {
-            Some(pool) => pool,
-            None => {
-                local_pool = ThreadPoolNoAbortBuilder::new()
-                    .thread_name(|i| format!("indexing-thread-{i}"))
-                    .build()
-                    .unwrap();
-                &local_pool
-            }
-        };
+        let pool = &indexer_config.thread_pool;
 
         progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges);
         let (document_changes, operation_stats, primary_key) = indexer
@@ -266,18 +256,8 @@ impl IndexScheduler {
 
         let mut congestion = None;
         if task.error.is_none() {
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;
 
             let candidates_count = candidates.len();
             progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges);
@@ -429,18 +409,8 @@ impl IndexScheduler {
 
         let mut congestion = None;
         if !tasks.iter().all(|res| res.error.is_some()) {
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;
 
             progress.update_progress(DocumentDeletionProgress::DeleteDocuments);
             let mut indexer = indexer::DocumentDeletion::new();
```
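All three hunks delete the same fallback: `thread_pool` used to be an `Option`, and each call site lazily built a pool when it was `None`; after the change the config is assumed to always hold one. The structural difference, with a hypothetical `Pool` stand-in type:

```rust
// Hypothetical stand-in; the real field holds milli's no-abort thread pool.
struct Pool;

struct OldConfig { thread_pool: Option<Pool> }
struct NewConfig { thread_pool: Pool }

fn old_call_site(cfg: &OldConfig) {
    // Before: every call site needed a local fallback slot and a match.
    let local_pool;
    let _pool = match &cfg.thread_pool {
        Some(pool) => pool,
        None => {
            local_pool = Pool; // built on demand, once per call site
            &local_pool
        }
    };
}

fn new_call_site(cfg: &NewConfig) {
    // After: a plain borrow; construction happens once, at config time.
    let _pool = &cfg.thread_pool;
}

fn main() {
    old_call_site(&OldConfig { thread_pool: None });
    new_call_site(&NewConfig { thread_pool: Pool });
}
```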
```diff
@@ -58,7 +58,7 @@ impl IndexScheduler {
         // 2.4 Only copy the update files of the enqueued tasks
         progress.update_progress(SnapshotCreationProgress::SnapshotTheUpdateFiles);
         let enqueued = self.queue.tasks.get_status(&rtxn, Status::Enqueued)?;
-        let (atomic, update_file_progress) = AtomicUpdateFileStep::new(enqueued.len() as u32);
+        let (atomic, update_file_progress) = AtomicUpdateFileStep::new(enqueued.len() as u64);
         progress.update_progress(update_file_progress);
         for task_id in enqueued {
             let task =
@@ -74,12 +74,12 @@ impl IndexScheduler {
         // 3. Snapshot every indexes
         progress.update_progress(SnapshotCreationProgress::SnapshotTheIndexes);
         let index_mapping = self.index_mapper.index_mapping;
-        let nb_indexes = index_mapping.len(&rtxn)? as u32;
+        let nb_indexes = index_mapping.len(&rtxn)? as u64;
 
         for (i, result) in index_mapping.iter(&rtxn)?.enumerate() {
             let (name, uuid) = result?;
             progress.update_progress(VariableNameStep::<SnapshotCreationProgress>::new(
-                name, i as u32, nb_indexes,
+                name, i as u64, nb_indexes,
             ));
             let index = self.index_mapper.index(&rtxn, name)?;
             let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string());
@@ -22,8 +22,8 @@ impl IndexScheduler {
         }
         progress.update_progress(VariableNameStep::<UpgradeIndex>::new(
             format!("Upgrading index `{uid}`"),
-            i as u32,
-            indexes.len() as u32,
+            i as u64,
+            indexes.len() as u64,
         ));
         let index = self.index(uid)?;
         let mut index_wtxn = index.write_txn()?;
@@ -65,8 +65,8 @@ impl IndexScheduler {
         for (i, uid) in indexes.iter().enumerate() {
             progress.update_progress(VariableNameStep::<UpgradeIndex>::new(
                 format!("Rollbacking index `{uid}`"),
-                i as u32,
-                indexes.len() as u32,
+                i as u64,
+                indexes.len() as u64,
             ));
             let index_schd_rtxn = self.env.read_txn()?;
```
```diff
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [4,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
 2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
 3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -37,7 +37,7 @@ catto [1,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
```
```diff
@@ -15,7 +15,7 @@ license.workspace = true
 serde_json = "1.0"
 
 [dev-dependencies]
-criterion = "0.5.1"
+criterion = "0.6.0"
 
 [[bench]]
 name = "depth"
```
```diff
@@ -14,4 +14,6 @@ license.workspace = true
 # fixed version due to format breakages in v1.40
 insta = { version = "=1.39.0", features = ["json", "redactions"] }
 md5 = "0.7.0"
-once_cell = "1.20"
+once_cell = "1.21"
+regex-lite = "0.1.6"
+uuid = { version = "1.17.0", features = ["v4"] }
```
```diff
@@ -4,9 +4,16 @@ use std::path::{Path, PathBuf};
 use std::sync::Mutex;
 
 pub use insta;
+use insta::internals::{Content, ContentPath};
 use once_cell::sync::Lazy;
+use regex_lite::Regex;
 
 static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default);
+/// A regex to match UUIDs in messages, specifically looking for the UUID v4 format
+static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| {
+    Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}")
+        .unwrap()
+});
 
 /// Return the md5 hash of the given string
 pub fn hash_snapshot(snap: &str) -> String {
@@ -26,6 +33,39 @@ pub fn default_snapshot_settings_for_test<'a>(
     let filename = path.file_name().unwrap().to_str().unwrap();
     settings.set_omit_expression(true);
 
+    fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content {
+        match &content {
+            Content::String(s) => {
+                let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]");
+                Content::String(uuid_replaced.to_string())
+            }
+            _ => content,
+        }
+    }
+
+    fn uuid_in_json_key_redaction(content: Content, _content_path: ContentPath) -> Content {
+        match content {
+            Content::Map(map) => {
+                let new_map = map
+                    .iter()
+                    .map(|(key, value)| match key {
+                        Content::String(s) => {
+                            let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]");
+                            (Content::String(uuid_replaced.to_string()), value.clone())
+                        }
+                        _ => (key.clone(), value.clone()),
+                    })
+                    .collect();
+                Content::Map(new_map)
+            }
+            _ => content,
+        }
+    }
+
+    settings.add_dynamic_redaction(".**.message", uuid_in_message_redaction);
+    settings.add_dynamic_redaction(".**.indexUid", uuid_in_message_redaction);
+    settings.add_dynamic_redaction(".**.facetsByIndex", uuid_in_json_key_redaction);
+
     let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name);
     let test_name = test_name.rsplit("::").next().unwrap().to_owned();
@@ -232,6 +272,9 @@ macro_rules! json_string {
 #[cfg(test)]
 mod tests {
     use crate as meili_snap;
+    use crate::UUID_IN_MESSAGE_RE;
+    use uuid::Uuid;
 
     #[test]
     fn snap() {
         snapshot_hash!(10, @"d3d9446802a44259755d38e6d163e820");
@@ -279,4 +322,14 @@
         // snapshot_hash!("", name: "", @"d41d8cd98f00b204e9800998ecf8427e");
     }
+
+    #[test]
+    fn uuid_in_message_regex() {
+        let uuid1 = Uuid::new_v4();
+        let uuid2 = Uuid::new_v4();
+        let uuid3 = Uuid::new_v4();
+        let to_replace = format!("1 {uuid1} 2 {uuid2} 3 {uuid3} 4");
+        let replaced = UUID_IN_MESSAGE_RE.replace_all(to_replace.as_str(), "[uuid]");
+        assert_eq!(replaced, "1 [uuid] 2 [uuid] 3 [uuid] 4");
+    }
 }
```
```diff
@@ -17,10 +17,10 @@ hmac = "0.12.1"
 maplit = "1.0.2"
 meilisearch-types = { path = "../meilisearch-types" }
 rand = "0.8.5"
-roaring = { version = "0.10.10", features = ["serde"] }
-serde = { version = "1.0.217", features = ["derive"] }
-serde_json = { version = "1.0.135", features = ["preserve_order"] }
-sha2 = "0.10.8"
-thiserror = "2.0.9"
-time = { version = "0.3.37", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-uuid = { version = "1.11.0", features = ["serde", "v4"] }
+roaring = { version = "0.10.12", features = ["serde"] }
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = { version = "1.0.140", features = ["preserve_order"] }
+sha2 = "0.10.9"
+thiserror = "2.0.12"
+time = { version = "0.3.41", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+uuid = { version = "1.17.0", features = ["serde", "v4"] }
```
```diff
@@ -125,12 +125,11 @@ impl HeedAuthStore {
                 Action::MetricsAll => {
                     actions.insert(Action::MetricsGet);
                 }
+                Action::ChatsAll => {
+                    actions.extend([Action::ChatsGet, Action::ChatsDelete]);
+                }
                 Action::ChatsSettingsAll => {
-                    actions.extend([
-                        Action::ChatsSettingsGet,
-                        Action::ChatsSettingsUpdate,
-                        Action::ChatsSettingsDelete,
-                    ]);
+                    actions.extend([Action::ChatsSettingsGet, Action::ChatsSettingsUpdate]);
                 }
                 other => {
                     actions.insert(*other);
```
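The effect of this hunk: the `chatsSettings.*` wildcard now expands to get and update only, while the new `chats.*` arm expands to get and delete. A toy version of the expansion with a reduced action set (not the real enum):

```rust
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Action { ChatsSettingsAll, ChatsSettingsGet, ChatsSettingsUpdate }

// Reduced sketch of the auth store's wildcard expansion after the change.
fn expand(action: Action, actions: &mut HashSet<Action>) {
    match action {
        Action::ChatsSettingsAll => {
            actions.extend([Action::ChatsSettingsGet, Action::ChatsSettingsUpdate]);
        }
        other => {
            actions.insert(other);
        }
    }
}

fn main() {
    let mut granted = HashSet::new();
    expand(Action::ChatsSettingsAll, &mut granted);
    // The wildcard no longer implies any delete permission.
    assert_eq!(granted.len(), 2);
}
```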
```diff
@@ -11,37 +11,37 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-actix-web = { version = "4.9.0", default-features = false }
-anyhow = "1.0.95"
-bumpalo = "3.16.0"
+actix-web = { version = "4.11.0", default-features = false }
+anyhow = "1.0.98"
+bumpalo = "3.18.1"
 bumparaw-collections = "0.1.4"
-convert_case = "0.6.0"
+convert_case = "0.8.0"
 csv = "1.3.1"
 deserr = { version = "0.6.3", features = ["actix-web"] }
-either = { version = "1.13.0", features = ["serde"] }
+either = { version = "1.15.0", features = ["serde"] }
 enum-iterator = "2.1.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.35"
+flate2 = "1.1.2"
 fst = "0.4.7"
 memmap2 = "0.9.5"
 milli = { path = "../milli" }
-roaring = { version = "0.10.10", features = ["serde"] }
-rustc-hash = "2.1.0"
-serde = { version = "1.0.217", features = ["derive"] }
+roaring = { version = "0.10.12", features = ["serde"] }
+rustc-hash = "2.1.1"
+serde = { version = "1.0.219", features = ["derive"] }
 serde-cs = "0.2.4"
-serde_json = { version = "1.0.135", features = ["preserve_order"] }
-tar = "0.4.43"
-tempfile = "3.15.0"
-thiserror = "2.0.9"
-time = { version = "0.3.37", features = [
+serde_json = { version = "1.0.140", features = ["preserve_order"] }
+tar = "0.4.44"
+tempfile = "3.20.0"
+thiserror = "2.0.12"
+time = { version = "0.3.41", features = [
     "serde-well-known",
     "formatting",
     "parsing",
     "macros",
 ] }
-tokio = "1.43"
-utoipa = { version = "5.3.1", features = ["macros"] }
-uuid = { version = "1.11.0", features = ["serde", "v4"] }
+tokio = "1.45"
+utoipa = { version = "5.4.0", features = ["macros"] }
+uuid = { version = "1.17.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 # fixed version due to format breakages in v1.40
```
```diff
@@ -390,7 +390,11 @@ InvalidDocumentEditionFunctionFilter , InvalidRequest , BAD_REQUEST ;
 EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST ;
 InvalidSettingsIndexChat , InvalidRequest , BAD_REQUEST ;
+// Experimental features - Chat Completions
+ChatWorkspaceNotFound , InvalidRequest , NOT_FOUND ;
+UnimplementedExternalFunctionCalling , InvalidRequest , NOT_IMPLEMENTED ;
+UnimplementedNonStreamingChatCompletions , InvalidRequest , NOT_IMPLEMENTED ;
+UnimplementedMultiChoiceChatCompletions , InvalidRequest , NOT_IMPLEMENTED ;
-ChatNotFound , InvalidRequest , NOT_FOUND ;
 InvalidChatSettingDocumentTemplate , InvalidRequest , BAD_REQUEST ;
 InvalidChatCompletionOrgId , InvalidRequest , BAD_REQUEST ;
 InvalidChatCompletionProjectId , InvalidRequest , BAD_REQUEST ;
 InvalidChatCompletionApiVersion , InvalidRequest , BAD_REQUEST ;
@@ -446,6 +450,7 @@ impl ErrorCode for milli::Error {
             | UserError::InvalidSettingsDimensions { .. }
             | UserError::InvalidUrl { .. }
             | UserError::InvalidSettingsDocumentTemplateMaxBytes { .. }
+            | UserError::InvalidChatSettingsDocumentTemplateMaxBytes
             | UserError::InvalidPrompt(_)
             | UserError::InvalidDisableBinaryQuantization { .. }
             | UserError::InvalidSourceForNested { .. }
```
```diff
@@ -2,12 +2,13 @@ use std::collections::BTreeMap;
 
 use serde::{Deserialize, Serialize};
 
 use crate::error::{Code, ResponseError};
 
 pub const DEFAULT_CHAT_SYSTEM_PROMPT: &str = "You are a highly capable research assistant with access to powerful search tools. IMPORTANT INSTRUCTIONS:1. When answering questions, you MUST make multiple tool calls (at least 2-3) to gather comprehensive information.2. Use different search queries for each tool call - vary keywords, rephrase questions, and explore different semantic angles to ensure broad coverage.3. Always explicitly announce BEFORE making each tool call by saying: \"I'll search for [specific information] now.\"4. Combine information from ALL tool calls to provide complete, nuanced answers rather than relying on a single source.5. For complex topics, break down your research into multiple targeted queries rather than using a single generic search.";
 pub const DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT: &str =
     "Search the database for relevant JSON documents using an optional query.";
 pub const DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT: &str = "The search query string used to find relevant documents in the index. This should contain keywords or phrases that best represent what the user is looking for. More specific queries will yield more precise results.";
-pub const DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT: &str = "The name of the index to search within. An index is a collection of documents organized for search. Selecting the right index ensures the most relevant results for the user query. You have access to two indexes: movies, steam. The movies index contains movies with overviews. The steam index contains steam games from the Steam platform with their prices";
-pub const DEFAULT_CHAT_PRE_QUERY_PROMPT: &str = "";
+pub const DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT: &str = "The name of the index to search within. An index is a collection of documents organized for search. Selecting the right index ensures the most relevant results for the user query.";
@@ -59,7 +60,7 @@ pub struct ChatCompletionSettings {
     #[serde(default)]
     pub deployment_id: Option<String>,
     #[serde(default)]
-    pub base_api: Option<String>,
+    pub base_url: Option<String>,
     #[serde(default)]
     pub api_key: Option<String>,
     #[serde(default)]
```
```diff
@@ -89,9 +90,24 @@ impl ChatCompletionSettings {
             }
         }
     }
 
+    pub fn validate(&self) -> Result<(), ResponseError> {
+        use ChatCompletionSource::*;
+        match self {
+            Self { source: AzureOpenAi, base_url, deployment_id, api_version, .. } if base_url.is_none() || deployment_id.is_none() || api_version.is_none() => Err(ResponseError::from_msg(
+                "azureOpenAi requires setting a valid `baseUrl`, `deploymentId`, and `apiVersion`".to_string(),
+                Code::BadRequest,
+            )),
+            Self { source: VLlm, base_url, .. } if base_url.is_none() => Err(ResponseError::from_msg(
+                "vLlm requires setting a valid `baseUrl`".to_string(),
+                Code::BadRequest,
+            )),
+            _otherwise => Ok(()),
+        }
+    }
 }
 
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Default)]
 #[serde(rename_all = "camelCase")]
 pub enum ChatCompletionSource {
     #[default]
```
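A minimal sketch of how such a guard-based validator behaves, with stand-in types rather than the real `ChatCompletionSettings`:

```rust
enum Source { OpenAi, AzureOpenAi, VLlm }

struct Settings {
    source: Source,
    base_url: Option<String>,
    deployment_id: Option<String>,
    api_version: Option<String>,
}

fn validate(s: &Settings) -> Result<(), String> {
    match s {
        // Azure needs all three fields; any missing one is rejected.
        Settings { source: Source::AzureOpenAi, base_url, deployment_id, api_version, .. }
            if base_url.is_none() || deployment_id.is_none() || api_version.is_none() =>
            Err("azureOpenAi requires `baseUrl`, `deploymentId`, and `apiVersion`".into()),
        // vLLM only needs a base URL.
        Settings { source: Source::VLlm, base_url, .. } if base_url.is_none() =>
            Err("vLlm requires `baseUrl`".into()),
        _ => Ok(()),
    }
}

fn main() {
    let vllm = Settings { source: Source::VLlm, base_url: None, deployment_id: None, api_version: None };
    assert!(validate(&vllm).is_err());
    let azure = Settings { source: Source::AzureOpenAi, base_url: Some("https://example".into()), deployment_id: None, api_version: None };
    assert!(validate(&azure).is_err());
    let openai = Settings { source: Source::OpenAi, base_url: None, deployment_id: None, api_version: None };
    assert!(validate(&openai).is_ok());
}
```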
```diff
@@ -128,7 +144,8 @@ impl ChatCompletionSource {
     /// Old OpenAI models use the system role while new ones use the developer role.
     fn old_openai_model(model: &str) -> bool {
         ["gpt-3.5", "gpt-4", "gpt-4.1", "gpt-4.5", "gpt-4o", "chatgpt-4o"].iter().any(|old| {
-            model.starts_with(old) && model.chars().nth(old.len()).is_none_or(|last| last == '-')
+            model.starts_with(old)
+                && model.chars().nth(old.chars().count()).is_none_or(|last| last == '-')
         })
     }
```
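The fix swaps byte length (`old.len()`) for character count (`old.chars().count()`) when indexing into `chars()`. For the all-ASCII model names listed the two coincide, but the counted form is the generally correct pairing with `chars().nth`. A self-contained rendering of the check, using `map_or` in place of the newer `Option::is_none_or` for broader compiler compatibility:

```rust
// Does `model` belong to the `old` family? True when the prefix matches and
// the character after the prefix, if any, is a `-` separator.
fn in_family(model: &str, old: &str) -> bool {
    model.starts_with(old)
        && model.chars().nth(old.chars().count()).map_or(true, |c| c == '-')
}

fn main() {
    assert!(in_family("gpt-4", "gpt-4"));        // exact match
    assert!(in_family("gpt-4-turbo", "gpt-4"));  // dash-separated suffix
    assert!(!in_family("gpt-45", "gpt-4"));      // different model, not the family
}
```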
```diff
@@ -137,7 +154,7 @@
         match self {
             OpenAi => Some("https://api.openai.com/v1/"),
             Mistral => Some("https://api.mistral.ai/v1/"),
-            Gemini => Some("https://generativelanguage.googleapis.com/v1beta/openai/"),
+            Gemini => Some("https://generativelanguage.googleapis.com/v1beta/openai"),
             AzureOpenAi | VLlm => None,
         }
     }
@@ -150,7 +167,6 @@ pub struct ChatCompletionPrompts {
     pub search_description: String,
     pub search_q_param: String,
     pub search_index_uid_param: String,
-    pub pre_query: String,
 }
 
 impl Default for ChatCompletionPrompts {
@@ -160,7 +176,6 @@ impl Default for ChatCompletionPrompts {
             search_description: DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT.to_string(),
             search_q_param: DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT.to_string(),
             search_index_uid_param: DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT.to_string(),
-            pre_query: DEFAULT_CHAT_PRE_QUERY_PROMPT.to_string(),
         }
     }
 }
```
@@ -323,13 +323,18 @@ pub enum Action {
|
||||
#[serde(rename = "network.update")]
|
||||
#[deserr(rename = "network.update")]
|
||||
NetworkUpdate,
|
||||
// TODO should we rename it chatCompletions.get ?
|
||||
#[serde(rename = "chatCompletion")]
|
||||
#[deserr(rename = "chatCompletion")]
|
||||
#[serde(rename = "chatCompletions")]
|
||||
#[deserr(rename = "chatCompletions")]
|
||||
ChatCompletions,
|
||||
#[serde(rename = "chats.*")]
|
||||
#[deserr(rename = "chats.*")]
|
||||
ChatsAll,
|
||||
#[serde(rename = "chats.get")]
|
||||
#[deserr(rename = "chats.get")]
|
||||
ChatsGet,
|
||||
#[serde(rename = "chats.delete")]
|
||||
#[deserr(rename = "chats.delete")]
|
||||
ChatsDelete,
|
||||
#[serde(rename = "chatsSettings.*")]
|
||||
#[deserr(rename = "chatsSettings.*")]
|
||||
ChatsSettingsAll,
|
||||
@@ -339,9 +344,6 @@ pub enum Action {
|
||||
#[serde(rename = "chatsSettings.update")]
|
||||
#[deserr(rename = "chatsSettings.update")]
|
||||
ChatsSettingsUpdate,
|
||||
#[serde(rename = "chatsSettings.delete")]
|
||||
#[deserr(rename = "chatsSettings.delete")]
|
||||
ChatsSettingsDelete,
|
||||
}
|
||||
|
||||
impl Action {
|
||||
@@ -368,11 +370,12 @@ impl Action {
|
||||
SETTINGS_GET => Some(Self::SettingsGet),
|
||||
SETTINGS_UPDATE => Some(Self::SettingsUpdate),
|
||||
CHAT_COMPLETIONS => Some(Self::ChatCompletions),
|
||||
CHATS_ALL => Some(Self::ChatsAll),
|
||||
CHATS_GET => Some(Self::ChatsGet),
|
||||
CHATS_DELETE => Some(Self::ChatsDelete),
|
||||
CHATS_SETTINGS_ALL => Some(Self::ChatsSettingsAll),
|
||||
CHATS_SETTINGS_GET => Some(Self::ChatsSettingsGet),
|
||||
CHATS_SETTINGS_UPDATE => Some(Self::ChatsSettingsUpdate),
|
||||
CHATS_SETTINGS_DELETE => Some(Self::ChatsSettingsDelete),
|
||||
STATS_ALL => Some(Self::StatsAll),
|
||||
STATS_GET => Some(Self::StatsGet),
|
||||
METRICS_ALL => Some(Self::MetricsAll),
|
||||
@@ -439,9 +442,10 @@ pub mod actions {
    pub const NETWORK_UPDATE: u8 = NetworkUpdate.repr();

    pub const CHAT_COMPLETIONS: u8 = ChatCompletions.repr();
    pub const CHATS_ALL: u8 = ChatsAll.repr();
    pub const CHATS_GET: u8 = ChatsGet.repr();
    pub const CHATS_DELETE: u8 = ChatsDelete.repr();
    pub const CHATS_SETTINGS_ALL: u8 = ChatsSettingsAll.repr();
    pub const CHATS_SETTINGS_GET: u8 = ChatsSettingsGet.repr();
    pub const CHATS_SETTINGS_UPDATE: u8 = ChatsSettingsUpdate.repr();
    pub const CHATS_SETTINGS_DELETE: u8 = ChatsSettingsDelete.repr();
}

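The constants above depend on a `const fn repr` so every action exposes a stable `u8` discriminant usable in const contexts. A reduced sketch of that pattern with a toy enum and made-up discriminant values:

```rust
#[derive(Clone, Copy)]
#[repr(u8)]
enum Action {
    ChatCompletions = 40, // illustrative values, not the real discriminants
    ChatsAll = 41,
}

impl Action {
    const fn repr(self) -> u8 {
        self as u8
    }
}

pub const CHAT_COMPLETIONS: u8 = Action::ChatCompletions.repr();
pub const CHATS_ALL: u8 = Action::ChatsAll.repr();

fn main() {
    assert_eq!(CHAT_COMPLETIONS, 40);
    assert_eq!(CHATS_ALL, 41);
}
```
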
@@ -697,7 +697,7 @@ pub fn apply_settings_to_builder(
    match typo_tolerance {
        Setting::Set(ref value) => {
            match value.enabled {
                Setting::Set(val) => builder.set_autorize_typos(val),
                Setting::Set(val) => builder.set_authorize_typos(val),
                Setting::Reset => builder.reset_authorize_typos(),
                Setting::NotSet => (),
            }

@@ -13,52 +13,50 @@ license.workspace = true
default-run = "meilisearch"

[dependencies]
actix-cors = "0.7.0"
actix-http = { version = "3.9.0", default-features = false, features = [
actix-cors = "0.7.1"
actix-http = { version = "3.11.0", default-features = false, features = [
    "compress-brotli",
    "compress-gzip",
    "rustls-0_23",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.9.0", default-features = false, features = [
actix-web = { version = "4.11.0", default-features = false, features = [
    "macros",
    "compress-brotli",
    "compress-gzip",
    "cookies",
    "rustls-0_23",
] }
anyhow = { version = "1.0.95", features = ["backtrace"] }
async-trait = "0.1.85"
bstr = "1.11.3"
anyhow = { version = "1.0.98", features = ["backtrace"] }
bstr = "1.12.0"
byte-unit = { version = "5.1.6", features = ["serde"] }
bytes = "1.9.0"
bumpalo = "3.16.0"
clap = { version = "4.5.24", features = ["derive", "env"] }
bytes = "1.10.1"
bumpalo = "3.18.1"
clap = { version = "4.5.40", features = ["derive", "env"] }
crossbeam-channel = "0.5.15"
deserr = { version = "0.6.3", features = ["actix-web"] }
dump = { path = "../dump" }
either = "1.13.0"
either = "1.15.0"
file-store = { path = "../file-store" }
flate2 = "1.0.35"
flate2 = "1.1.2"
fst = "0.4.7"
futures = "0.3.31"
futures-util = "0.3.31"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "2.7.0", features = ["serde"] }
is-terminal = "0.4.13"
indexmap = { version = "2.9.0", features = ["serde"] }
is-terminal = "0.4.16"
itertools = "0.14.0"
jsonwebtoken = "9.3.0"
jsonwebtoken = "9.3.1"
lazy_static = "1.5.0"
liquid = "0.26.9"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.43", default-features = false }
mimalloc = { version = "0.1.47", default-features = false }
mime = "0.3.17"
num_cpus = "1.16.0"
num_cpus = "1.17.0"
obkv = "0.3.0"
once_cell = "1.20.2"
ordered-float = "4.6.0"
parking_lot = "0.12.3"
once_cell = "1.21.3"
ordered-float = "5.0.0"
parking_lot = "0.12.4"
permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.16"
platform-dirs = "0.3.0"
@@ -66,44 +64,44 @@ prometheus = { version = "0.14.0", features = ["process"] }
rand = "0.8.5"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.20", features = [
    "rustls-tls",
    "json",
], default-features = false }
rustls = { version = "0.23.20", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.10.1", features = ["alloc"] }
rustls = { version = "0.23.28", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.12.0", features = ["alloc"] }
rustls-pemfile = "2.2.0"
segment = { version = "0.2.5" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
sha2 = "0.10.8"
segment = { version = "0.2.6" }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = { version = "1.0.140", features = ["preserve_order"] }
sha2 = "0.10.9"
siphasher = "1.0.1"
slice-group-by = "0.3.1"
static-files = { version = "0.2.4", optional = true }
sysinfo = "0.33.1"
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
static-files = { version = "0.2.5", optional = true }
sysinfo = "0.35.2"
tar = "0.4.44"
tempfile = "3.20.0"
thiserror = "2.0.12"
time = { version = "0.3.41", features = [
    "serde-well-known",
    "formatting",
    "parsing",
    "macros",
] }
tokio = { version = "1.43.1", features = ["full"] }
toml = "0.8.19"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
tokio = { version = "1.45.1", features = ["full"] }
toml = "0.8.23"
uuid = { version = "1.17.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["json"] }
tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.15"
tracing-actix-web = "0.7.18"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.10"
roaring = "0.10.12"
mopa-maintained = "0.2.3"
utoipa = { version = "5.3.1", features = [
utoipa = { version = "5.4.0", features = [
    "actix_extras",
    "macros",
    "non_strict_integers",
@@ -119,29 +117,29 @@ actix-web-lab = { version = "0.24.1", default-features = false }

[dev-dependencies]
actix-rt = "2.10.0"
brotli = "6.0.0"
brotli = "8.0.1"
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
insta = { version = "=1.39.0", features = ["redactions"] }
manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.2"
wiremock = "0.6.3"
yaup = "0.3.1"

[build-dependencies]
anyhow = { version = "1.0.95", optional = true }
cargo_toml = { version = "0.21.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
cargo_toml = { version = "0.22.1", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.20", features = [
    "blocking",
    "rustls-tls",
], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.4", optional = true }
tempfile = { version = "3.15.0", optional = true }
zip = { version = "2.3.0", optional = true }
static-files = { version = "0.2.5", optional = true }
tempfile = { version = "3.20.0", optional = true }
zip = { version = "4.1.0", optional = true }

[features]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]

@@ -37,7 +37,9 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
use meilisearch_auth::{open_auth_store_env, AuthController};
use meilisearch_types::milli::constants::VERSION_MAJOR;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
use meilisearch_types::milli::update::{
    default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
};
use meilisearch_types::settings::apply_settings_to_builder;
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::versioning::{
@@ -501,7 +503,19 @@ fn import_dump(
    let network = dump_reader.network()?.cloned().unwrap_or_default();
    index_scheduler.put_network(network)?;

    let indexer_config = index_scheduler.indexer_config();
    // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
    let backup_config;
    let base_config = index_scheduler.indexer_config();

    let indexer_config = if base_config.max_threads.is_none() {
        let (thread_pool, _) = default_thread_pool_and_threads();

        let _config = IndexerConfig { thread_pool, ..*base_config };
        backup_config = _config;
        &backup_config
    } else {
        base_config
    };

    // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
    // try to process tasks while we're trying to import the indexes.

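The block above sizes a pool to every core only while importing a dump, and only when no explicit limit is configured. The decision reduces to an `Option` fallback; a standalone sketch with a hypothetical helper:

```rust
/// Hypothetical helper: a configured thread cap wins; otherwise use all cores.
fn effective_threads(configured: Option<usize>) -> usize {
    configured.unwrap_or_else(|| {
        std::thread::available_parallelism().map(|n| n.get()).unwrap_or(1)
    })
}

fn main() {
    assert_eq!(effective_threads(Some(4)), 4);
    assert!(effective_threads(None) >= 1);
}
```
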
@@ -761,10 +761,12 @@ impl IndexerOpts {
                max_indexing_memory.to_string(),
            );
        }
        export_to_env_if_not_present(
            MEILI_MAX_INDEXING_THREADS,
            max_indexing_threads.0.to_string(),
        );
        if let Some(max_indexing_threads) = max_indexing_threads.0 {
            export_to_env_if_not_present(
                MEILI_MAX_INDEXING_THREADS,
                max_indexing_threads.to_string(),
            );
        }
    }
}

@@ -772,15 +774,15 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
    type Error = anyhow::Error;

    fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
        let thread_pool = ThreadPoolNoAbortBuilder::new()
            .thread_name(|index| format!("indexing-thread:{index}"))
            .num_threads(*other.max_indexing_threads)
        let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing()
            .num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2))
            .build()?;

        Ok(Self {
            thread_pool,
            log_every_n: Some(DEFAULT_LOG_EVERY_N),
            max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
            thread_pool: Some(thread_pool),
            max_threads: *other.max_indexing_threads,
            max_positions_per_attributes: None,
            skip_index_budget: other.skip_index_budget,
            ..Default::default()
@@ -843,31 +845,31 @@ fn total_memory_bytes() -> Option<u64> {
    }
}

#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxThreads(usize);
#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxThreads(Option<usize>);

impl FromStr for MaxThreads {
    type Err = ParseIntError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        usize::from_str(s).map(Self)
    }
}

impl Default for MaxThreads {
    fn default() -> Self {
        MaxThreads(num_cpus::get() / 2)
    fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
        if s.is_empty() || s == "unlimited" {
            return Ok(MaxThreads::default());
        }
        usize::from_str(s).map(Some).map(MaxThreads)
    }
}

impl fmt::Display for MaxThreads {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
        match self.0 {
            Some(threads) => write!(f, "{}", threads),
            None => write!(f, "unlimited"),
        }
    }
}

impl Deref for MaxThreads {
    type Target = usize;
    type Target = Option<usize>;

    fn deref(&self) -> &Self::Target {
        &self.0

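Extracted from the hunk above, the new parsing behavior in isolation: `None` now means no cap, spelled `unlimited` or an empty value on the command line. A runnable sketch:

```rust
use std::num::ParseIntError;
use std::str::FromStr;

#[derive(Debug, Default, Clone, Copy, PartialEq)]
struct MaxThreads(Option<usize>);

impl FromStr for MaxThreads {
    type Err = ParseIntError;

    fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
        // Empty or "unlimited" means "no cap"; anything else must be a usize.
        if s.is_empty() || s == "unlimited" {
            return Ok(MaxThreads::default());
        }
        usize::from_str(s).map(Some).map(MaxThreads)
    }
}

fn main() {
    assert_eq!("unlimited".parse::<MaxThreads>().unwrap(), MaxThreads(None));
    assert_eq!("".parse::<MaxThreads>().unwrap(), MaxThreads(None));
    assert_eq!("8".parse::<MaxThreads>().unwrap(), MaxThreads(Some(8)));
    assert!("eight".parse::<MaxThreads>().is_err());
}
```
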
@@ -1,291 +0,0 @@
use std::mem;

use actix_web::web::{self, Data};
use actix_web::{Either, HttpResponse, Responder};
use actix_web_lab::sse::{self, Event};
use async_openai::config::OpenAIConfig;
use async_openai::types::{
    ChatCompletionRequestAssistantMessageArgs, ChatCompletionRequestMessage,
    ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
    ChatCompletionToolArgs, ChatCompletionToolType, CreateChatCompletionRequest, FinishReason,
    FunctionObjectArgs,
};
use async_openai::Client;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use meilisearch_types::milli::index::IndexEmbeddingConfig;
use meilisearch_types::milli::prompt::PromptData;
use meilisearch_types::milli::vector::EmbeddingConfig;
use meilisearch_types::{Document, Index};
use serde::{Deserialize, Serialize};
use serde_json::json;

use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
use crate::routes::indexes::search::search_kind;
use crate::search::{
    add_search_rules, perform_search, HybridQuery, RetrieveVectors, SearchQuery, SemanticRatio,
};
use crate::search_queue::SearchQueue;

/// The default description of the searchInIndex tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_TOOL_DESCRIPTION: &str =
    "Search the database for relevant JSON documents using an optional query.";
/// The default description of the searchInIndex `q` parameter tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_Q_PARAMETER_TOOL_DESCRIPTION: &str =
    "The search query string used to find relevant documents in the index. \
     This should contain keywords or phrases that best represent what the user is looking for. \
     More specific queries will yield more precise results.";
/// The default description of the searchInIndex `index` parameter tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_INDEX_PARAMETER_TOOL_DESCRIPTION: &str =
    "The name of the index to search within. An index is a collection of documents organized for search. \
     Selecting the right index ensures the most relevant results for the user query";

const EMBEDDER_NAME: &str = "openai";

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(chat)));
}

/// Get a chat completion
async fn chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_GET }>, Data<IndexScheduler>>,
    search_queue: web::Data<SearchQueue>,
    web::Json(mut chat_completion): web::Json<CreateChatCompletionRequest>,
) -> impl Responder {
    // To enable later on, when the feature will be experimental
    // index_scheduler.features().check_chat("Using the /chat route")?;

    if chat_completion.stream.unwrap_or(false) {
        Either::Right(streamed_chat(index_scheduler, search_queue, chat_completion).await)
    } else {
        Either::Left(non_streamed_chat(index_scheduler, search_queue, chat_completion).await)
    }
}

async fn non_streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_GET }>, Data<IndexScheduler>>,
    search_queue: web::Data<SearchQueue>,
    mut chat_completion: CreateChatCompletionRequest,
) -> Result<HttpResponse, ResponseError> {
    let api_key = std::env::var("MEILI_OPENAI_API_KEY")
        .expect("cannot find OpenAI API Key (MEILI_OPENAI_API_KEY)");
    let config = OpenAIConfig::default().with_api_key(&api_key); // we can also change the API base
    let client = Client::with_config(config);

    assert_eq!(
        chat_completion.n.unwrap_or(1),
        1,
        "Meilisearch /chat only support one completion at a time (n = 1, n = null)"
    );

    let rtxn = index_scheduler.read_txn().unwrap();
    let search_in_index_description = index_scheduler
        .chat_prompts(&rtxn, "searchInIndex-description")
        .unwrap()
        .unwrap_or(DEFAULT_SEARCH_IN_INDEX_TOOL_DESCRIPTION)
        .to_string();
    let search_in_index_q_param_description = index_scheduler
        .chat_prompts(&rtxn, "searchInIndex-q-param-description")
        .unwrap()
        .unwrap_or(DEFAULT_SEARCH_IN_INDEX_Q_PARAMETER_TOOL_DESCRIPTION)
        .to_string();
    let search_in_index_index_description = index_scheduler
        .chat_prompts(&rtxn, "searchInIndex-index-param-description")
        .unwrap()
        .unwrap_or(DEFAULT_SEARCH_IN_INDEX_INDEX_PARAMETER_TOOL_DESCRIPTION)
        .to_string();
    drop(rtxn);

    let mut response;
    loop {
        let tools = chat_completion.tools.get_or_insert_default();
        tools.push(
            ChatCompletionToolArgs::default()
                .r#type(ChatCompletionToolType::Function)
                .function(
                    FunctionObjectArgs::default()
                        .name("searchInIndex")
                        .description(&search_in_index_description)
                        .parameters(json!({
                            "type": "object",
                            "properties": {
                                "index_uid": {
                                    "type": "string",
                                    "enum": ["main"],
                                    "description": search_in_index_index_description,
                                },
                                "q": {
                                    "type": ["string", "null"],
                                    "description": search_in_index_q_param_description,
                                }
                            },
                            "required": ["index_uid", "q"],
                            "additionalProperties": false,
                        }))
                        .strict(true)
                        .build()
                        .unwrap(),
                )
                .build()
                .unwrap(),
        );
        response = client.chat().create(chat_completion.clone()).await.unwrap();

        let choice = &mut response.choices[0];
        match choice.finish_reason {
            Some(FinishReason::ToolCalls) => {
                let tool_calls = mem::take(&mut choice.message.tool_calls).unwrap_or_default();

                let (meili_calls, other_calls): (Vec<_>, Vec<_>) =
                    tool_calls.into_iter().partition(|call| call.function.name == "searchInIndex");

                chat_completion.messages.push(
                    ChatCompletionRequestAssistantMessageArgs::default()
                        .tool_calls(meili_calls.clone())
                        .build()
                        .unwrap()
                        .into(),
                );

                for call in meili_calls {
                    let SearchInIndexParameters { index_uid, q } =
                        serde_json::from_str(&call.function.arguments).unwrap();

                    let mut query = SearchQuery {
                        q,
                        hybrid: Some(HybridQuery {
                            semantic_ratio: SemanticRatio::default(),
                            embedder: EMBEDDER_NAME.to_string(),
                        }),
                        limit: 20,
                        ..Default::default()
                    };

                    // Tenant token search_rules.
                    if let Some(search_rules) =
                        index_scheduler.filters().get_index_search_rules(&index_uid)
                    {
                        add_search_rules(&mut query.filter, search_rules);
                    }

                    // TBD
                    // let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);

                    let index = index_scheduler.index(&index_uid)?;
                    let search_kind = search_kind(
                        &query,
                        index_scheduler.get_ref(),
                        index_uid.to_string(),
                        &index,
                    )?;

                    let permit = search_queue.try_get_search_permit().await?;
                    let features = index_scheduler.features();
                    let index_cloned = index.clone();
                    let search_result = tokio::task::spawn_blocking(move || {
                        perform_search(
                            index_uid.to_string(),
                            &index_cloned,
                            query,
                            search_kind,
                            RetrieveVectors::new(false),
                            features,
                        )
                    })
                    .await;
                    permit.drop().await;

                    let search_result = search_result?;
                    if let Ok(ref search_result) = search_result {
                        // aggregate.succeed(search_result);
                        if search_result.degraded {
                            MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
                        }
                    }
                    // analytics.publish(aggregate, &req);

                    let search_result = search_result?;
                    let formatted = format_documents(
                        &index,
                        search_result.hits.into_iter().map(|doc| doc.document),
                    );
                    let text = formatted.join("\n");
                    chat_completion.messages.push(ChatCompletionRequestMessage::Tool(
                        ChatCompletionRequestToolMessage {
                            tool_call_id: call.id,
                            content: ChatCompletionRequestToolMessageContent::Text(text),
                        },
                    ));
                }

                // Let the client call other tools by themselves
                if !other_calls.is_empty() {
                    response.choices[0].message.tool_calls = Some(other_calls);
                    break;
                }
            }
            _ => break,
        }
    }

    Ok(HttpResponse::Ok().json(response))
}

async fn streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_GET }>, Data<IndexScheduler>>,
    search_queue: web::Data<SearchQueue>,
    mut chat_completion: CreateChatCompletionRequest,
) -> impl Responder {
    assert!(chat_completion.stream.unwrap_or(false));

    let api_key = std::env::var("MEILI_OPENAI_API_KEY")
        .expect("cannot find OpenAI API Key (MEILI_OPENAI_API_KEY)");
    let config = OpenAIConfig::default().with_api_key(&api_key); // we can also change the API base
    let client = Client::with_config(config);
    let response = client.chat().create_stream(chat_completion).await.unwrap();
    actix_web_lab::sse::Sse::from_stream(response.map(|response| {
        response
            .map(|mut r| Event::Data(sse::Data::new_json(r.choices.pop().unwrap().delta).unwrap()))
    }))
}

#[derive(Deserialize)]
struct SearchInIndexParameters {
    /// The index uid to search in.
    index_uid: String,
    /// The query parameter to use.
    q: Option<String>,
}

fn format_documents(index: &Index, documents: impl Iterator<Item = Document>) -> Vec<String> {
    let rtxn = index.read_txn().unwrap();
    let IndexEmbeddingConfig { name: _, config, user_provided: _ } = index
        .embedding_configs(&rtxn)
        .unwrap()
        .into_iter()
        .find(|conf| conf.name == EMBEDDER_NAME)
        .unwrap();

    let EmbeddingConfig {
        embedder_options: _,
        prompt: PromptData { template, max_bytes },
        quantized: _,
    } = config;

    #[derive(Serialize)]
    struct Doc<T: Serialize> {
        doc: T,
    }

    let template = liquid::ParserBuilder::with_stdlib().build().unwrap().parse(&template).unwrap();
    documents
        .map(|doc| {
            let object = liquid::to_object(&Doc { doc }).unwrap();
            template.render(&object).unwrap()
        })
        .collect()
}
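The deleted `format_documents` above rendered each hit through the embedder's liquid template. A minimal sketch of that rendering step with the `liquid` crate and an illustrative document shape:

```rust
use serde::Serialize;

#[derive(Serialize)]
struct Doc<T: Serialize> {
    doc: T,
}

fn main() -> Result<(), liquid::Error> {
    let template = liquid::ParserBuilder::with_stdlib()
        .build()?
        .parse("title: {{ doc.title }}")?;
    // Wrap the document so the template can address fields as `doc.<field>`.
    let object = liquid::to_object(&Doc { doc: serde_json::json!({ "title": "Wonderwall" }) })?;
    assert_eq!(template.render(&object)?, "title: Wonderwall");
    Ok(())
}
```
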
@@ -24,8 +24,8 @@ use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
    ChatCompletionPrompts as DbChatCompletionPrompts, ChatCompletionSettings as DbChatSettings,
    SystemRole,
    ChatCompletionPrompts as DbChatCompletionPrompts,
    ChatCompletionSource as DbChatCompletionSource, SystemRole,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::index::ChatConfig;
@@ -37,7 +37,7 @@ use tokio::runtime::Handle;
use tokio::sync::mpsc::error::SendError;

use super::config::Config;
use super::errors::StreamErrorEvent;
use super::errors::{MistralError, OpenAiOutsideError, StreamErrorEvent};
use super::utils::format_documents;
use super::{
    ChatsParam, MEILI_APPEND_CONVERSATION_MESSAGE_NAME, MEILI_SEARCH_IN_INDEX_FUNCTION_NAME,
@@ -67,12 +67,6 @@ async fn chat(
) -> impl Responder {
    let ChatsParam { workspace_uid } = chats_param.into_inner();

    assert_eq!(
        chat_completion.n.unwrap_or(1),
        1,
        "Meilisearch /chat only support one completion at a time (n = 1, n = null)"
    );

    if chat_completion.stream.unwrap_or(false) {
        Either::Right(
            streamed_chat(
@@ -122,8 +116,24 @@ fn setup_search_tool(
    system_role: SystemRole,
) -> Result<FunctionSupport, ResponseError> {
    let tools = chat_completion.tools.get_or_insert_default();
    if tools.iter().any(|t| t.function.name == MEILI_SEARCH_IN_INDEX_FUNCTION_NAME) {
        panic!("{MEILI_SEARCH_IN_INDEX_FUNCTION_NAME} function already set");
    for tool in &tools[..] {
        match tool.function.name.as_str() {
            MEILI_SEARCH_IN_INDEX_FUNCTION_NAME => {
                return Err(ResponseError::from_msg(
                    format!("{MEILI_SEARCH_IN_INDEX_FUNCTION_NAME} function is already defined."),
                    Code::BadRequest,
                ));
            }
            MEILI_SEARCH_PROGRESS_NAME
            | MEILI_SEARCH_SOURCES_NAME
            | MEILI_APPEND_CONVERSATION_MESSAGE_NAME => (),
            external_function_name => {
                return Err(ResponseError::from_msg(
                    format!("{external_function_name}: External functions are not supported yet."),
                    Code::UnimplementedExternalFunctionCalling,
                ));
            }
        }
    }

    // Remove internal tools used for front-end notifications as they should be hidden from the LLM.
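The replacement above turns a `panic!` into proper request errors while tolerating the reserved front-end tool names. Its screening logic in isolation, with constants mirroring the real ones:

```rust
const SEARCH_IN_INDEX: &str = "_meiliSearchInIndex";
const FRONTEND_TOOLS: &[&str] =
    &["_meiliSearchProgress", "_meiliSearchSources", "_meiliAppendConversationMessage"];

/// Reject the reserved search tool and unknown external tools; tolerate the
/// front-end notification tools.
fn screen_tool(name: &str) -> Result<(), String> {
    match name {
        n if n == SEARCH_IN_INDEX => Err(format!("{n} function is already defined.")),
        n if FRONTEND_TOOLS.contains(&n) => Ok(()),
        other => Err(format!("{other}: External functions are not supported yet.")),
    }
}

fn main() {
    assert!(screen_tool("_meiliSearchProgress").is_ok());
    assert!(screen_tool("_meiliSearchInIndex").is_err());
    assert!(screen_tool("myCustomTool").is_err());
}
```
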
@@ -229,9 +239,6 @@ async fn process_search_request(
    index_uid: String,
    q: Option<String>,
) -> Result<(Index, Vec<Document>, String), ResponseError> {
    // TBD
    // let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);

    let index = index_scheduler.index(&index_uid)?;
    let rtxn = index.static_read_txn()?;
    let ChatConfig { description: _, prompt: _, search_parameters } = index.chat_config(&rtxn)?;
@@ -290,7 +297,6 @@ async fn process_search_request(
            documents.push(document);
        }
    }
    // analytics.publish(aggregate, &req);

    let (rtxn, search_result) = output?;
    let render_alloc = Bump::new();
@@ -301,24 +307,36 @@ async fn process_search_request(
    Ok((index, documents, text))
}

#[allow(unreachable_code, unused_variables)] // will be correctly implemented in the future
async fn non_streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    search_queue: web::Data<SearchQueue>,
    workspace_uid: &str,
    req: HttpRequest,
    mut chat_completion: CreateChatCompletionRequest,
    chat_completion: CreateChatCompletionRequest,
) -> Result<HttpResponse, ResponseError> {
    index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
    let filters = index_scheduler.filters();

    let rtxn = index_scheduler.read_txn()?;
    let chat_settings = match index_scheduler.chat_settings(&rtxn, workspace_uid).unwrap() {
    if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
        return Err(ResponseError::from_msg(
            format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
            Code::UnimplementedMultiChoiceChatCompletions,
        ));
    }

    return Err(ResponseError::from_msg(
        "Non-streamed chat completions is not implemented".to_string(),
        Code::UnimplementedNonStreamingChatCompletions,
    ));

    let filters = index_scheduler.filters();
    let chat_settings = match index_scheduler.chat_settings(workspace_uid).unwrap() {
        Some(settings) => settings,
        None => {
            return Err(ResponseError::from_msg(
                format!("Chat `{workspace_uid}` not found"),
                Code::ChatWorkspaceNotFound,
                Code::ChatNotFound,
            ))
        }
    };
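Both handlers now share the same guard: `Option::filter` keeps `n` only when it is present and not 1, so the error can echo the rejected value. A standalone sketch:

```rust
/// Mirrors the guard above: reject any explicit n other than 1.
fn check_single_choice(n: Option<u8>) -> Result<(), String> {
    if let Some(n) = n.filter(|&n| n != 1) {
        return Err(format!(
            "You tried to specify n = {n} but only single choices are supported (n = 1)."
        ));
    }
    Ok(())
}

fn main() {
    assert!(check_single_choice(None).is_ok());
    assert!(check_single_choice(Some(1)).is_ok());
    assert!(check_single_choice(Some(3)).is_err());
}
```
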
@@ -373,12 +391,11 @@ async fn non_streamed_chat(
    };

    // TODO report documents sources later
    let text = match result {
    let answer = match result {
        Ok((_, _documents, text)) => text,
        Err(err) => err,
    };

    let answer = format!("{}\n\n{text}", chat_settings.prompts.pre_query);
    chat_completion.messages.push(ChatCompletionRequestMessage::Tool(
        ChatCompletionRequestToolMessage {
            tool_call_id: call.id.clone(),
@@ -411,17 +428,22 @@ async fn streamed_chat(
    index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
    let filters = index_scheduler.filters();

    let rtxn = index_scheduler.read_txn()?;
    let chat_settings = match index_scheduler.chat_settings(&rtxn, workspace_uid)? {
    if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
        return Err(ResponseError::from_msg(
            format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
            Code::UnimplementedMultiChoiceChatCompletions,
        ));
    }

    let chat_settings = match index_scheduler.chat_settings(workspace_uid)? {
        Some(settings) => settings,
        None => {
            return Err(ResponseError::from_msg(
                format!("Chat `{workspace_uid}` not found"),
                Code::ChatWorkspaceNotFound,
                Code::ChatNotFound,
            ))
        }
    };
    drop(rtxn);

    let config = Config::new(&chat_settings);
    let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
@@ -450,7 +472,7 @@ async fn streamed_chat(
                &search_queue,
                &auth_token,
                &client,
                &chat_settings,
                chat_settings.source,
                &mut chat_completion,
                &tx,
                &mut global_tool_calls,
@@ -483,14 +505,14 @@ async fn run_conversation<C: async_openai::config::Config>(
    search_queue: &web::Data<SearchQueue>,
    auth_token: &str,
    client: &Client<C>,
    chat_settings: &DbChatSettings,
    source: DbChatCompletionSource,
    chat_completion: &mut CreateChatCompletionRequest,
    tx: &SseEventSender,
    global_tool_calls: &mut HashMap<u32, Call>,
    function_support: FunctionSupport,
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
    let mut finish_reason = None;
    // safety: The unwrap can only happen if the stream is not correctly configured.
    // safety: unwrap: can only happens if `stream` was set to `false`
    let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
    while let Some(result) = response.next().await {
        match result {
@@ -530,15 +552,11 @@ async fn run_conversation<C: async_openai::config::Config>(
                            Call::External
                        }
                    });

                    if global_tool_calls.get(index).is_some_and(Call::is_external) {
                        todo!("Support forwarding external tool calls");
                    }
                }
            }
            None => {
                if !global_tool_calls.is_empty() {
                    let (meili_calls, other_calls): (Vec<_>, Vec<_>) =
                    let (meili_calls, _other_calls): (Vec<_>, Vec<_>) =
                        mem::take(global_tool_calls)
                            .into_values()
                            .flat_map(|call| match call {
@@ -563,17 +581,11 @@ async fn run_conversation<C: async_openai::config::Config>(
                            .into(),
                    );

                    assert!(
                        other_calls.is_empty(),
                        "We do not support external tool forwarding for now"
                    );

                    handle_meili_tools(
                        index_scheduler,
                        auth_ctrl,
                        search_queue,
                        auth_token,
                        chat_settings,
                        tx,
                        meili_calls,
                        chat_completion,
@@ -588,7 +600,13 @@ async fn run_conversation<C: async_openai::config::Config>(
                }
            }
            Err(error) => {
                let error = StreamErrorEvent::from_openai_error(error).await.unwrap();
                let result = match source {
                    DbChatCompletionSource::Mistral => {
                        StreamErrorEvent::from_openai_error::<MistralError>(error).await
                    }
                    _ => StreamErrorEvent::from_openai_error::<OpenAiOutsideError>(error).await,
                };
                let error = result.unwrap_or_else(StreamErrorEvent::from_reqwest_error);
                tx.send_error(&error).await?;
                return Ok(ControlFlow::Break(None));
            }
@@ -611,7 +629,6 @@ async fn handle_meili_tools(
    auth_ctrl: &web::Data<AuthController>,
    search_queue: &web::Data<SearchQueue>,
    auth_token: &str,
    chat_settings: &DbChatSettings,
    tx: &SseEventSender,
    meili_calls: Vec<ChatCompletionMessageToolCall>,
    chat_completion: &mut CreateChatCompletionRequest,
@@ -639,8 +656,10 @@ async fn handle_meili_tools(
            .await?;
        }

        let mut error = None;

        let result = match serde_json::from_str(&call.function.arguments) {
            Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
            Ok(SearchInIndexParameters { index_uid, q }) => match process_search_request(
                index_scheduler,
                auth_ctrl.clone(),
                search_queue,
@@ -649,22 +668,27 @@ async fn handle_meili_tools(
                q,
            )
            .await
            .map_err(|e| e.to_string()),
            {
                Ok(output) => Ok(output),
                Err(err) => {
                    let error_text = format!("the search tool call failed with {err}");
                    error = Some(err);
                    Err(error_text)
                }
            },
            Err(err) => Err(err.to_string()),
        };

        let text = match result {
        let answer = match result {
            Ok((_index, documents, text)) => {
                if report_sources {
                    tx.report_sources(resp.clone(), &call.id, &documents).await?;
                }

                text
            }
            Err(err) => err,
        };

        let answer = format!("{}\n\n{text}", chat_settings.prompts.pre_query);
        let tool = ChatCompletionRequestMessage::Tool(ChatCompletionRequestToolMessage {
            tool_call_id: call.id.clone(),
            content: ChatCompletionRequestToolMessageContent::Text(answer),
@@ -675,6 +699,10 @@ async fn handle_meili_tools(
        }

        chat_completion.messages.push(tool);

        if let Some(error) = error {
            tx.send_error(&StreamErrorEvent::from_response_error(error)).await?;
        }
    }

    Ok(())
@@ -696,16 +724,13 @@ impl Call {
        matches!(self, Call::Internal { .. })
    }

    fn is_external(&self) -> bool {
        matches!(self, Call::External { .. })
    }

    /// # Panics
    ///
    /// - if called on external calls
    fn append(&mut self, more: &str) {
        match self {
            Call::Internal { arguments, .. } => arguments.push_str(more),
            Call::External { .. } => {
                panic!("Cannot append argument chunks to an external function")
            }
            Call::External => panic!("Cannot append argument chunks to an external function"),
        }
    }
}

@@ -24,8 +24,9 @@ impl Config {
            if let Some(api_key) = chat_settings.api_key.as_ref() {
                config = config.with_api_key(api_key);
            }
            if let Some(base_api) = chat_settings.base_api.as_ref() {
                config = config.with_api_base(base_api);
            let base_url = chat_settings.base_url.as_deref();
            if let Some(base_url) = chat_settings.source.base_url().or(base_url) {
                config = config.with_api_base(base_url);
            }
            Self::OpenAiCompatible(config)
        }
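The new lookup gives a source-mandated base URL priority over the user-supplied one via `Option::or`. Reduced to its core as a hypothetical function, assuming that precedence is the intended behavior:

```rust
/// Hypothetical reduction of the lookup above: the source's fixed base URL
/// wins; the user-configured one is only a fallback.
fn effective_base_url<'a>(
    source_base: Option<&'a str>,
    configured: Option<&'a str>,
) -> Option<&'a str> {
    source_base.or(configured)
}

fn main() {
    assert_eq!(
        effective_base_url(Some("https://api.mistral.ai/v1"), Some("http://localhost:8080")),
        Some("https://api.mistral.ai/v1")
    );
    assert_eq!(
        effective_base_url(None, Some("http://localhost:8080")),
        Some("http://localhost:8080")
    );
}
```
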
@@ -40,8 +41,8 @@ impl Config {
            if let Some(api_key) = chat_settings.api_key.as_ref() {
                config = config.with_api_key(api_key);
            }
            if let Some(base_api) = chat_settings.base_api.as_ref() {
                config = config.with_api_base(base_api);
            if let Some(base_url) = chat_settings.base_url.as_ref() {
                config = config.with_api_base(base_url);
            }
            Self::AzureOpenAiCompatible(config)
        }

@@ -1,8 +1,42 @@
use async_openai::error::{ApiError, OpenAIError};
use async_openai::reqwest_eventsource::Error as EventSourceError;
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

/// The error type which is always `error`.
const ERROR_TYPE: &str = "error";

/// The error struct returned by the Mistral API.
///
/// ```json
/// {
///   "object": "error",
///   "message": "Service tier capacity exceeded for this model.",
///   "type": "invalid_request_error",
///   "param": null,
///   "code": null
/// }
/// ```
#[derive(Debug, Clone, Deserialize)]
pub struct MistralError {
    message: String,
    r#type: String,
    param: Option<String>,
    code: Option<String>,
}

impl From<MistralError> for StreamErrorEvent {
    fn from(error: MistralError) -> Self {
        let MistralError { message, r#type, param, code } = error;
        StreamErrorEvent {
            event_id: Uuid::new_v4().to_string(),
            r#type: ERROR_TYPE.to_owned(),
            error: StreamError { r#type, code, message, param, event_id: None },
        }
    }
}

#[derive(Debug, Clone, Deserialize)]
pub struct OpenAiOutsideError {
    /// Emitted when an error occurs.
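A sketch of deserializing the payload quoted in the doc comment above; serde ignores the unknown `object` field by default, so the struct only declares the four fields it uses:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct MistralError {
    message: String,
    r#type: String,
    param: Option<String>,
    code: Option<String>,
}

fn main() {
    let payload = r#"{
        "object": "error",
        "message": "Service tier capacity exceeded for this model.",
        "type": "invalid_request_error",
        "param": null,
        "code": null
    }"#;
    let error: MistralError = serde_json::from_str(payload).unwrap();
    assert_eq!(error.message, "Service tier capacity exceeded for this model.");
    assert_eq!(error.r#type, "invalid_request_error");
    assert!(error.param.is_none() && error.code.is_none());
}
```
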
@@ -22,6 +56,17 @@ pub struct OpenAiInnerError {
    r#type: String,
}

impl From<OpenAiOutsideError> for StreamErrorEvent {
    fn from(error: OpenAiOutsideError) -> Self {
        let OpenAiOutsideError { error: OpenAiInnerError { code, message, param, r#type } } = error;
        StreamErrorEvent {
            event_id: Uuid::new_v4().to_string(),
            r#type: ERROR_TYPE.to_string(),
            error: StreamError { r#type, code, message, param, event_id: None },
        }
    }
}

/// An error that occurs during the streaming process.
///
/// It directly comes from the OpenAI API and you can
@@ -53,12 +98,15 @@ pub struct StreamError {
}

impl StreamErrorEvent {
    pub async fn from_openai_error(error: OpenAIError) -> Result<Self, reqwest::Error> {
        let error_type = "error".to_string();
    pub async fn from_openai_error<E>(error: OpenAIError) -> Result<Self, reqwest::Error>
    where
        E: serde::de::DeserializeOwned,
        Self: From<E>,
    {
        match error {
            OpenAIError::Reqwest(e) => Ok(StreamErrorEvent {
                event_id: Uuid::new_v4().to_string(),
                r#type: error_type,
                r#type: ERROR_TYPE.to_string(),
                error: StreamError {
                    r#type: "internal_reqwest_error".to_string(),
                    code: Some("internal".to_string()),
@@ -69,7 +117,7 @@ impl StreamErrorEvent {
            }),
            OpenAIError::ApiError(ApiError { message, r#type, param, code }) => {
                Ok(StreamErrorEvent {
                    r#type: error_type,
                    r#type: ERROR_TYPE.to_string(),
                    event_id: Uuid::new_v4().to_string(),
                    error: StreamError {
                        r#type: r#type.unwrap_or_else(|| "unknown".to_string()),
@@ -82,7 +130,7 @@ impl StreamErrorEvent {
            }
            OpenAIError::JSONDeserialize(error) => Ok(StreamErrorEvent {
                event_id: Uuid::new_v4().to_string(),
                r#type: error_type,
                r#type: ERROR_TYPE.to_string(),
                error: StreamError {
                    r#type: "json_deserialize_error".to_string(),
                    code: Some("internal".to_string()),
@@ -94,30 +142,16 @@ impl StreamErrorEvent {
            OpenAIError::FileSaveError(_) | OpenAIError::FileReadError(_) => unreachable!(),
            OpenAIError::StreamError(error) => match error {
                EventSourceError::InvalidStatusCode(_status_code, response) => {
                    let OpenAiOutsideError {
                        error: OpenAiInnerError { code, message, param, r#type },
                    } = response.json().await?;

                    Ok(StreamErrorEvent {
                        event_id: Uuid::new_v4().to_string(),
                        r#type: error_type,
                        error: StreamError { r#type, code, message, param, event_id: None },
                    })
                    let error = response.json::<E>().await?;
                    Ok(StreamErrorEvent::from(error))
                }
                EventSourceError::InvalidContentType(_header_value, response) => {
                    let OpenAiOutsideError {
                        error: OpenAiInnerError { code, message, param, r#type },
                    } = response.json().await?;

                    Ok(StreamErrorEvent {
                        event_id: Uuid::new_v4().to_string(),
                        r#type: error_type,
                        error: StreamError { r#type, code, message, param, event_id: None },
                    })
                    let error = response.json::<E>().await?;
                    Ok(StreamErrorEvent::from(error))
                }
                EventSourceError::Utf8(error) => Ok(StreamErrorEvent {
                    event_id: Uuid::new_v4().to_string(),
                    r#type: error_type,
                    r#type: ERROR_TYPE.to_string(),
                    error: StreamError {
                        r#type: "invalid_utf8_error".to_string(),
                        code: None,
@@ -128,7 +162,7 @@ impl StreamErrorEvent {
                }),
                EventSourceError::Parser(error) => Ok(StreamErrorEvent {
                    event_id: Uuid::new_v4().to_string(),
                    r#type: error_type,
                    r#type: ERROR_TYPE.to_string(),
                    error: StreamError {
                        r#type: "parser_error".to_string(),
                        code: None,
@@ -139,7 +173,7 @@ impl StreamErrorEvent {
                }),
                EventSourceError::Transport(error) => Ok(StreamErrorEvent {
                    event_id: Uuid::new_v4().to_string(),
                    r#type: error_type,
                    r#type: ERROR_TYPE.to_string(),
                    error: StreamError {
                        r#type: "transport_error".to_string(),
                        code: None,
@@ -150,7 +184,7 @@ impl StreamErrorEvent {
                }),
                EventSourceError::InvalidLastEventId(message) => Ok(StreamErrorEvent {
                    event_id: Uuid::new_v4().to_string(),
                    r#type: error_type,
                    r#type: ERROR_TYPE.to_string(),
                    error: StreamError {
                        r#type: "invalid_last_event_id".to_string(),
                        code: None,
@@ -161,7 +195,7 @@ impl StreamErrorEvent {
                }),
                EventSourceError::StreamEnded => Ok(StreamErrorEvent {
                    event_id: Uuid::new_v4().to_string(),
                    r#type: error_type,
                    r#type: ERROR_TYPE.to_string(),
                    error: StreamError {
                        r#type: "stream_ended".to_string(),
                        code: None,
@@ -173,7 +207,7 @@ impl StreamErrorEvent {
            },
            OpenAIError::InvalidArgument(message) => Ok(StreamErrorEvent {
                event_id: Uuid::new_v4().to_string(),
                r#type: error_type,
                r#type: ERROR_TYPE.to_string(),
                error: StreamError {
                    r#type: "invalid_argument".to_string(),
                    code: None,
@@ -184,4 +218,33 @@ impl StreamErrorEvent {
            }),
        }
    }

    pub fn from_response_error(error: ResponseError) -> Self {
        let ResponseError { code, message, .. } = error;
        StreamErrorEvent {
            event_id: Uuid::new_v4().to_string(),
            r#type: ERROR_TYPE.to_string(),
            error: StreamError {
                r#type: "response_error".to_string(),
                code: Some(code.as_str().to_string()),
                message,
                param: None,
                event_id: None,
            },
        }
    }

    pub fn from_reqwest_error(error: reqwest::Error) -> Self {
        StreamErrorEvent {
            event_id: Uuid::new_v4().to_string(),
            r#type: ERROR_TYPE.to_string(),
            error: StreamError {
                r#type: "reqwest_error".to_string(),
                code: None,
                message: error.to_string(),
                param: None,
                event_id: None,
            },
        }
    }
}

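The `E: DeserializeOwned, Self: From<E>` bounds introduced above let a single function decode whichever provider error body applies and convert it into the shared event type. The pattern in miniature, with toy types:

```rust
use serde::de::DeserializeOwned;
use serde::Deserialize;

struct StreamErrorEvent {
    message: String,
}

#[derive(Deserialize)]
struct MistralError {
    message: String,
}

impl From<MistralError> for StreamErrorEvent {
    fn from(e: MistralError) -> Self {
        StreamErrorEvent { message: e.message }
    }
}

/// Decode a provider-specific error body E, then convert it to the common type.
fn decode_error<E>(body: &str) -> Result<StreamErrorEvent, serde_json::Error>
where
    E: DeserializeOwned,
    StreamErrorEvent: From<E>,
{
    Ok(StreamErrorEvent::from(serde_json::from_str::<E>(body)?))
}

fn main() {
    let event = decode_error::<MistralError>(r#"{"message":"capacity exceeded"}"#).unwrap();
    assert_eq!(event.message, "capacity exceeded");
}
```
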
@@ -6,9 +6,11 @@ use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::deserr_codes::{InvalidIndexLimit, InvalidIndexOffset};
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tracing::debug;
use utoipa::{IntoParams, ToSchema};

@@ -24,12 +26,21 @@ pub mod settings;
mod utils;

/// The function name to report search progress.
/// This function is used to report on what meilisearch is
/// doing which must be used on the frontend to report progress.
const MEILI_SEARCH_PROGRESS_NAME: &str = "_meiliSearchProgress";
/// The function name to append a conversation message in the user conversation.
/// This function is used to append a conversation message in the user conversation.
/// This must be used on the frontend to keep context of what happened on the
/// Meilisearch-side and keep good context for follow up questions.
const MEILI_APPEND_CONVERSATION_MESSAGE_NAME: &str = "_meiliAppendConversationMessage";
/// The function name to report sources to the frontend.
/// This function is used to report sources to the frontend.
/// The call id is associated to the one used by the search progress function.
const MEILI_SEARCH_SOURCES_NAME: &str = "_meiliSearchSources";
/// The *internal* function name to provide to the LLM to search in indexes.
/// This function must not leak to the user as the LLM will call it and the
/// main goal of Meilisearch is to provide an answer to these calls.
const MEILI_SEARCH_IN_INDEX_FUNCTION_NAME: &str = "_meiliSearchInIndex";

#[derive(Deserialize)]
@@ -40,11 +51,44 @@ pub struct ChatsParam {
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::get().to(list_workspaces))).service(
        web::scope("/{workspace_uid}")
            .service(
                web::resource("")
                    .route(web::get().to(get_chat))
                    .route(web::delete().to(delete_chat)),
            )
            .service(web::scope("/chat/completions").configure(chat_completions::configure))
            .service(web::scope("/settings").configure(settings::configure)),
    );
}

pub async fn get_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_GET }>, Data<IndexScheduler>>,
    workspace_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    index_scheduler.features().check_chat_completions("displaying a chat")?;

    let workspace_uid = IndexUid::try_from(workspace_uid.into_inner())?;
    if index_scheduler.chat_workspace_exists(&workspace_uid)? {
        Ok(HttpResponse::Ok().json(json!({ "uid": workspace_uid })))
    } else {
        Err(ResponseError::from_msg(format!("chat {workspace_uid} not found"), Code::ChatNotFound))
    }
}

pub async fn delete_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_DELETE }>, Data<IndexScheduler>>,
    workspace_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
    index_scheduler.features().check_chat_completions("deleting a chat")?;

    let workspace_uid = workspace_uid.into_inner();
    if index_scheduler.delete_chat_settings(&workspace_uid)? {
        Ok(HttpResponse::NoContent().finish())
    } else {
        Err(ResponseError::from_msg(format!("chat {workspace_uid} not found"), Code::ChatNotFound))
    }
}

#[derive(Deserr, Debug, Clone, Copy, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
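The routing above nests a per-workspace resource with scopes for completions and settings. A compile-checked sketch of the same shape with stub handlers (actix-web assumed, as in the surrounding code):

```rust
use actix_web::{web, App, HttpResponse, Responder};

async fn get_chat() -> impl Responder {
    HttpResponse::Ok()
}

async fn delete_chat() -> impl Responder {
    HttpResponse::NoContent()
}

fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/{workspace_uid}").service(
            web::resource("")
                .route(web::get().to(get_chat))
                .route(web::delete().to(delete_chat)),
        ),
    );
}

fn main() {
    // Building the App verifies the route tree; no server is started here.
    let _app = App::new().configure(configure);
}
```
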
@@ -79,12 +123,8 @@ pub async fn list_workspaces(
    index_scheduler.features().check_chat_completions("listing the chats")?;

    debug!(parameters = ?paginate, "List chat workspaces");
    let filters = index_scheduler.filters();
    let (total, workspaces) = index_scheduler.paginated_chat_workspace_uids(
        filters,
        *paginate.offset,
        *paginate.limit,
    )?;
    let (total, workspaces) =
        index_scheduler.paginated_chat_workspace_uids(*paginate.offset, *paginate.limit)?;
    let workspaces =
        workspaces.into_iter().map(|uid| ChatWorkspaceView { uid }).collect::<Vec<_>>();
    let ret = paginate.as_pagination().format_with(total, workspaces);

@@ -7,9 +7,9 @@ use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
    ChatCompletionPrompts as DbChatCompletionPrompts, ChatCompletionSettings,
    ChatCompletionSource as DbChatCompletionSource, DEFAULT_CHAT_PRE_QUERY_PROMPT,
    DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT, DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT,
    DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT, DEFAULT_CHAT_SYSTEM_PROMPT,
    ChatCompletionSource as DbChatCompletionSource, DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT,
    DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT, DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT,
    DEFAULT_CHAT_SYSTEM_PROMPT,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
@@ -26,7 +26,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
        web::resource("")
            .route(web::get().to(SeqHandler(get_settings)))
            .route(web::patch().to(SeqHandler(patch_settings)))
            .route(web::delete().to(SeqHandler(delete_settings))),
            .route(web::delete().to(SeqHandler(reset_settings))),
    );
}

@@ -41,14 +41,12 @@ async fn get_settings(

    let ChatsParam { workspace_uid } = chats_param.into_inner();

    // TODO do a spawn_blocking here ???
    let rtxn = index_scheduler.read_txn()?;
    let mut settings = match index_scheduler.chat_settings(&rtxn, &workspace_uid)? {
    let mut settings = match index_scheduler.chat_settings(&workspace_uid)? {
        Some(settings) => settings,
        None => {
            return Err(ResponseError::from_msg(
                format!("Chat `{workspace_uid}` not found"),
                Code::ChatWorkspaceNotFound,
                Code::ChatNotFound,
            ))
        }
    };
@@ -62,14 +60,12 @@ async fn patch_settings(
        Data<IndexScheduler>,
    >,
    chats_param: web::Path<ChatsParam>,
    web::Json(new): web::Json<GlobalChatSettings>,
    web::Json(new): web::Json<ChatWorkspaceSettings>,
) -> Result<HttpResponse, ResponseError> {
    index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
    let ChatsParam { workspace_uid } = chats_param.into_inner();

    // TODO do a spawn_blocking here
    let mut wtxn = index_scheduler.write_txn()?;
    let old_settings = index_scheduler.chat_settings(&wtxn, &workspace_uid)?.unwrap_or_default();
    let old_settings = index_scheduler.chat_settings(&workspace_uid)?.unwrap_or_default();

    let prompts = match new.prompts {
        Setting::Set(new_prompts) => DbChatCompletionPrompts {
@@ -93,17 +89,12 @@ async fn patch_settings(
                Setting::Reset => DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT.to_string(),
                Setting::NotSet => old_settings.prompts.search_index_uid_param,
            },
            pre_query: match new_prompts.pre_query {
                Setting::Set(new_description) => new_description,
                Setting::Reset => DEFAULT_CHAT_PRE_QUERY_PROMPT.to_string(),
                Setting::NotSet => old_settings.prompts.pre_query,
            },
        },
        Setting::Reset => DbChatCompletionPrompts::default(),
        Setting::NotSet => old_settings.prompts,
    };

    let settings = ChatCompletionSettings {
    let mut settings = ChatCompletionSettings {
        source: match new.source {
            Setting::Set(new_source) => new_source.into(),
            Setting::Reset => DbChatCompletionSource::default(),
@@ -129,10 +120,10 @@ async fn patch_settings(
            Setting::Reset => None,
            Setting::NotSet => old_settings.deployment_id,
        },
        base_api: match new.base_api {
            Setting::Set(new_base_api) => Some(new_base_api),
        base_url: match new.base_url {
            Setting::Set(new_base_url) => Some(new_base_url),
            Setting::Reset => None,
            Setting::NotSet => old_settings.base_api,
            Setting::NotSet => old_settings.base_url,
        },
        api_key: match new.api_key {
            Setting::Set(new_api_key) => Some(new_api_key),
@@ -151,15 +142,17 @@ async fn patch_settings(
    //     &req,
    // );

    index_scheduler.put_chat_settings(&mut wtxn, &workspace_uid, &settings)?;
    wtxn.commit()?;
    settings.validate()?;
    index_scheduler.put_chat_settings(&workspace_uid, &settings)?;

    settings.hide_secrets();

    Ok(HttpResponse::Ok().json(settings))
}

async fn delete_settings(
async fn reset_settings(
    index_scheduler: GuardedData<
        ActionPolicy<{ actions::CHATS_SETTINGS_DELETE }>,
        ActionPolicy<{ actions::CHATS_SETTINGS_UPDATE }>,
        Data<IndexScheduler>,
    >,
    chats_param: web::Path<ChatsParam>,
@@ -167,16 +160,14 @@ async fn delete_settings(
    index_scheduler.features().check_chat_completions("using the /chats/settings route")?;

    let ChatsParam { workspace_uid } = chats_param.into_inner();

    // TODO do a spawn_blocking here
    let mut wtxn = index_scheduler.write_txn()?;
    if index_scheduler.delete_chat_settings(&mut wtxn, &workspace_uid)? {
        wtxn.commit()?;
        Ok(HttpResponse::NoContent().finish())
    if index_scheduler.chat_settings(&workspace_uid)?.is_some() {
        let settings = Default::default();
        index_scheduler.put_chat_settings(&workspace_uid, &settings)?;
        Ok(HttpResponse::Ok().json(settings))
    } else {
        Err(ResponseError::from_msg(
            format!("Chat `{workspace_uid}` not found"),
            Code::ChatWorkspaceNotFound,
            Code::ChatNotFound,
        ))
    }
}
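The patch flow now validates the merged settings before persisting them and masks secrets before echoing them back. A sketch of that final step with hypothetical minimal types:

```rust
struct ChatCompletionSettings {
    api_key: Option<String>,
}

impl ChatCompletionSettings {
    fn validate(&self) -> Result<(), String> {
        // Real validation would check source/deployment consistency; stubbed here.
        Ok(())
    }

    fn hide_secrets(&mut self) {
        // Replace the stored key with a placeholder so it never leaves the server.
        if let Some(key) = self.api_key.as_mut() {
            *key = "XXX...".to_string();
        }
    }
}

fn main() {
    let mut settings = ChatCompletionSettings { api_key: Some("abcd1234".into()) };
    settings.validate().unwrap();
    settings.hide_secrets();
    assert_eq!(settings.api_key.as_deref(), Some("XXX..."));
}
```
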
@@ -185,7 +176,7 @@ async fn delete_settings(
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct GlobalChatSettings {
pub struct ChatWorkspaceSettings {
    #[serde(default)]
    #[deserr(default)]
    #[schema(value_type = Option<ChatCompletionSource>)]
@@ -209,7 +200,7 @@ pub struct GlobalChatSettings {
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidChatCompletionBaseApi>)]
    #[schema(value_type = Option<String>, example = json!("https://api.mistral.ai/v1"))]
    pub base_api: Setting<String>,
    pub base_url: Setting<String>,
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidChatCompletionApiKey>)]
    #[schema(value_type = Option<String>, example = json!("abcd1234..."))]
@@ -226,12 +217,21 @@ pub struct GlobalChatSettings {
pub enum ChatCompletionSource {
    #[default]
    OpenAi,
    Mistral,
    Gemini,
    AzureOpenAi,
    VLlm,
}

impl From<ChatCompletionSource> for DbChatCompletionSource {
    fn from(source: ChatCompletionSource) -> Self {
        use ChatCompletionSource::*;
        match source {
            ChatCompletionSource::OpenAi => DbChatCompletionSource::OpenAi,
            OpenAi => DbChatCompletionSource::OpenAi,
            Mistral => DbChatCompletionSource::Mistral,
            Gemini => DbChatCompletionSource::Gemini,
            AzureOpenAi => DbChatCompletionSource::AzureOpenAi,
            VLlm => DbChatCompletionSource::VLlm,
        }
    }
}
@@ -257,8 +257,4 @@ pub struct ChatPrompts {
    #[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchIndexUidParamPrompt>)]
    #[schema(value_type = Option<String>, example = json!("This is index you want to search in..."))]
    pub search_index_uid_param: Setting<String>,
    #[serde(default)]
    #[deserr(default, error = DeserrJsonError<InvalidChatCompletionPreQueryPrompt>)]
    #[schema(value_type = Option<String>)]
    pub pre_query: Setting<String>,
}

@@ -9,7 +9,7 @@ use async_openai::types::{
    FunctionCall, FunctionCallStream, Role,
};
use bumpalo::Bump;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::prompt::{Prompt, PromptData};
@@ -41,7 +41,7 @@ impl SseEventSender {
        function_name: String,
        function_arguments: String,
    ) -> Result<(), SendError<Event>> {
        #[allow(deprecated)]
        #[allow(deprecated)] // function_call
        let message =
            ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage {
                content: None,
@@ -78,7 +78,7 @@ impl SseEventSender {
|
||||
|
||||
resp.choices[0] = ChatChoiceStream {
|
||||
index: 0,
|
||||
#[allow(deprecated)]
|
||||
#[allow(deprecated)] // function_call
|
||||
delta: ChatCompletionStreamResponseDelta {
|
||||
content: None,
|
||||
function_call: None,
|
||||
@@ -125,7 +125,7 @@ impl SseEventSender {
|
||||
|
||||
resp.choices[0] = ChatChoiceStream {
|
||||
index: 0,
|
||||
#[allow(deprecated)]
|
||||
#[allow(deprecated)] // function_call
|
||||
delta: ChatCompletionStreamResponseDelta {
|
||||
content: None,
|
||||
function_call: None,
|
||||
@@ -170,7 +170,7 @@ impl SseEventSender {
|
||||
|
||||
resp.choices[0] = ChatChoiceStream {
|
||||
index: 0,
|
||||
#[allow(deprecated)]
|
||||
#[allow(deprecated)] // function_call
|
||||
delta: ChatCompletionStreamResponseDelta {
|
||||
content: None,
|
||||
function_call: None,
|
||||
@@ -197,7 +197,10 @@ impl SseEventSender {
|
||||
}
|
||||
|
||||
pub async fn stop(self) -> Result<(), SendError<Event>> {
|
||||
self.0.send(Event::Data(sse::Data::new("[DONE]"))).await
|
||||
// It is the way OpenAI sends a correct end of stream
|
||||
// <https://platform.openai.com/docs/api-reference/assistants-streaming/events>
|
||||
const DONE_DATA: &str = "[DONE]";
|
||||
self.0.send(Event::Data(sse::Data::new(DONE_DATA))).await
|
||||
}
|
||||
|
||||
async fn send_json<S: Serialize>(&self, data: &S) -> Result<(), SendError<Event>> {
|
||||
@@ -232,10 +235,17 @@ pub fn format_documents<'doc>(
|
||||
for (docid, external_docid) in internal_docids.into_iter().zip(external_ids) {
|
||||
let document = match DocumentFromDb::new(docid, rtxn, index, &fid_map)? {
|
||||
Some(doc) => doc,
|
||||
None => continue,
|
||||
None => unreachable!("Document with internal ID {docid} not found"),
|
||||
};
|
||||
let text = match prompt.render_document(&external_docid, document, &gfid_map, doc_alloc) {
|
||||
Ok(text) => text,
|
||||
Err(err) => {
|
||||
return Err(ResponseError::from_msg(
|
||||
err.to_string(),
|
||||
Code::InvalidChatSettingDocumentTemplate,
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
let text = prompt.render_document(&external_docid, document, &gfid_map, doc_alloc).unwrap();
|
||||
renders.push(text);
|
||||
}
|
||||
|
||||
|
||||
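For context on the `[DONE]` constant introduced above: OpenAI-compatible clients read `data:` events off the SSE stream until they meet that sentinel. A self-contained, hypothetical sketch of such a consumer loop (only the wire format is taken from the code above; the function itself is invented):

// Returns false once the stream is finished.
fn handle_sse_line(line: &str) -> bool {
    match line.strip_prefix("data: ") {
        // End-of-stream sentinel emitted by stop() above.
        Some("[DONE]") => false,
        Some(payload) => {
            // A real client would deserialize this JSON chunk here.
            println!("chunk: {payload}");
            true
        }
        // Ignore SSE comments and keep-alive lines.
        None => true,
    }
}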
@@ -569,10 +569,6 @@ pub async fn update_all(
debug!(parameters = ?new_settings, "Update all settings");
let new_settings = validate_settings(new_settings, &index_scheduler)?;

if !new_settings.chat.is_not_set() {
index_scheduler.features().check_chat_completions("setting `chat` in the index route")?;
}

analytics.publish(
SettingsAnalytics {
ranking_rules: RankingRulesAnalytics::new(new_settings.ranking_rules.as_ref().set()),

@@ -57,7 +57,7 @@ pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
pub const DEFAULT_SEMANTIC_RATIO: fn() -> SemanticRatio = || SemanticRatio(0.5);

#[derive(Clone, PartialEq, Deserr, ToSchema)]
#[derive(Clone, Default, PartialEq, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
@@ -145,19 +145,9 @@ impl From<SearchParameters> for SearchQuery {
matching_strategy: matching_strategy.map(MatchingStrategy::from).unwrap_or_default(),
attributes_to_search_on,
ranking_score_threshold: ranking_score_threshold.map(RankingScoreThreshold::from),
..Default::default()
}
}
}

impl Default for SearchQuery {
fn default() -> Self {
SearchQuery {
q: None,
vector: None,
hybrid: None,
offset: DEFAULT_SEARCH_OFFSET(),
limit: DEFAULT_SEARCH_LIMIT(),
page: None,
hits_per_page: None,
attributes_to_retrieve: None,
@@ -169,15 +159,10 @@ impl Default for SearchQuery {
show_ranking_score: false,
show_ranking_score_details: false,
filter: None,
sort: None,
distinct: None,
facets: None,
highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(),
highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(),
crop_marker: DEFAULT_CROP_MARKER(),
matching_strategy: Default::default(),
attributes_to_search_on: None,
ranking_score_threshold: None,
locales: None,
}
}

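The hunks above trim the hand-written `Default` body and fill the remaining `From<SearchParameters>` fields with struct-update syntax, so only the fields that differ are spelled out. A tiny, hypothetical illustration of that pattern (names invented for the example):

#[derive(Default)]
struct Query {
    q: Option<String>,
    limit: usize,
}

// Override what differs; `..Default::default()` fills in the rest.
let query = Query { limit: 20, ..Default::default() };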
@@ -1,5 +1,5 @@
//! This file implements a queue of searches to process and the ability to control how many searches can be run in parallel.
//! We need this because we don't want to process more search requests than we have cores.
//! We need this because we don't want to process more search requests than the available CPU cores.
//! That slows down everything and consumes RAM for no reason.
//! The steps to do a search are to get the `SearchQueue` data structure and try to get a search permit.
//! This can fail if the queue is full, and we need to drop your search request to register a new one.
@@ -8,7 +8,7 @@
//!
//! In order to do a search request you should try to get a search permit.
//! Retrieve the `SearchQueue` structure from actix-web (`search_queue: Data<SearchQueue>`)
//! and right before processing the search, calls the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//! and right before processing the search, call the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//!
//! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
//! Then, the queue/scheduler is going to either:
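A hedged sketch of the call pattern the doc comment describes; the handler name and body are invented, while the permit API is the one quoted above:

// Hypothetical actix-web handler following the documented protocol.
async fn search_handler(search_queue: Data<SearchQueue>) -> Result<HttpResponse, ResponseError> {
    // Keep the permit alive for the whole search; dropping it frees the slot.
    let _permit = search_queue.try_get_search_permit().await?;
    // ... run the actual search here ...
    Ok(HttpResponse::Ok().finish())
}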
@@ -121,12 +121,12 @@ impl SearchQueue {
let mut queue: Vec<oneshot::Sender<Permit>> = Default::default();
let mut rng: StdRng = StdRng::from_entropy();
let mut searches_running: usize = 0;
// By having a capacity of parallelism we ensures that every time a search finish it can release its RAM asap
// By having a capacity of parallelism we ensure that every time a search finish it can release its RAM asap
let (sender, mut search_finished) = mpsc::channel(parallelism.into());

loop {
tokio::select! {
// biased select because we wants to free up space before trying to register new tasks
// biased select because we want to free up space before trying to register new tasks
biased;
_ = search_finished.recv() => {
searches_running = searches_running.saturating_sub(1);
@@ -148,11 +148,11 @@ impl SearchQueue {

if searches_running < usize::from(parallelism) && queue.is_empty() {
searches_running += 1;
// if the search requests die it's not a hard error on our side
// if the search requests die, it's not a hard error on our side
let _ = search_request.send(Permit { sender: sender.clone() });
continue;
} else if capacity == 0 {
// in the very specific case where we have a capacity of zero
// in the very specific case where we have a capacity of zero,
// we must refuse the request straight away without going through
// the queue stuff.
drop(search_request);
@@ -183,7 +183,7 @@ impl SearchQueue {
.map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?;

// If we've been for more than one minute to get a search permit, it's better to simply
// abort the search request than spending time processing something were the client
// abort the search request than spending time processing something where the client
// most certainly exited or got a timeout a long time ago.
// We may find a better solution in https://github.com/actix/actix-web/issues/3462.
if now.elapsed() > self.time_to_abort {

@@ -421,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletion`, `chats.get`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `chatsSettings.delete`",
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() {
let (response, code) = server.add_api_key(content).await;
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.",
"message": "`uid` field value `[uuid]` is already an existing API key.",
"code": "api_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#api_key_already_exists"
@@ -856,7 +856,7 @@ async fn list_api_keys() {
"key": "[ignored]",
"uid": "[ignored]",
"actions": [
"chatCompletion",
"chatCompletions",
"search"
],
"indexes": [

@@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletion`, `chats.get`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `chatsSettings.delete`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

@@ -29,6 +29,10 @@ impl<'a> Index<'a, Owned> {
}
}

pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> {
Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData }
}

pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
@@ -290,6 +294,20 @@ impl Index<'_, Shared> {
}
(task, code)
}

pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
let (mut task, code) = self._update(primary_key).await;
if code.is_success() {
task = self.wait_task(task.uid()).await;
if task.is_success() {
panic!(
"`update_index_fail` succeeded: {}",
serde_json::to_string_pretty(&task).unwrap()
);
}
}
(task, code)
}
}

#[allow(dead_code)]
@@ -333,6 +351,14 @@ impl<State> Index<'_, State> {
self.service.post_encoded("/indexes", body, self.encoder).await
}

pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
let body = json!({
"primaryKey": primary_key,
});
let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, body, self.encoder).await
}

pub(super) async fn _delete(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
self.service.delete(url).await

@@ -128,7 +128,8 @@ impl Display for Value {
".finishedAt" => "[date]",
".duration" => "[duration]",
".processingTimeMs" => "[duration]",
".details.embedders.*.url" => "[url]"
".details.embedders.*.url" => "[url]",
".details.dumpUid" => "[dump_uid]",
})
)
}
@@ -264,6 +265,24 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
])
});

pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SHARED_SCORE_DOCUMENTS").to_shared();
let documents = SCORE_DOCUMENTS.clone();
let (response, _code) = index._add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
index
}).await
}

pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@@ -333,7 +352,7 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Sha
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}),
json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
@@ -435,3 +454,57 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
})
.await
}

pub static GEO_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"id": 1,
"name": "Taco Truck",
"address": "444 Salsa Street, Burritoville",
"type": "Mexican",
"rating": 9,
"_geo": {
"lat": 34.0522,
"lng": -118.2437
}
},
{
"id": 2,
"name": "La Bella Italia",
"address": "456 Elm Street, Townsville",
"type": "Italian",
"rating": 9,
"_geo": {
"lat": "45.4777599",
"lng": "9.1967508"
}
},
{
"id": 3,
"name": "Crêpe Truck",
"address": "2 Billig Avenue, Rouenville",
"type": "French",
"rating": 10
}
])
});

pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX
.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SHARED_GEO_DOCUMENTS").to_shared();
let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await.succeeded();

let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
index
})
.await
}

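A hypothetical test consuming the shared fixture above; every test awaits the same lazily-initialized index, which is why the fixture hands out a `&'static` reference:

#[actix_rt::test]
async fn geo_sort_smoke() {
    let index = shared_index_with_geo_documents().await;
    // `_geo` was made sortable in the fixture's settings update.
    let (response, code) =
        index.search_post(json!({"sort": ["_geoPoint(34.05, -118.24):asc"]})).await;
    assert_eq!(code, 200, "{response}");
}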
@@ -347,6 +347,16 @@ impl<State> Server<State> {
}
}

pub fn unique_index_with_prefix(&self, prefix: &str) -> Index<'_> {
let uuid = Uuid::new_v4();
Index {
uid: format!("{prefix}-{}", uuid),
service: &self.service,
encoder: Encoder::Plain,
marker: PhantomData,
}
}

pub fn unique_index_with_encoder(&self, encoder: Encoder) -> Index<'_> {
let uuid = Uuid::new_v4();
Index { uid: uuid.to_string(), service: &self.service, encoder, marker: PhantomData }
@@ -399,18 +409,9 @@ impl<State> Server<State> {
pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
// Increase timeout for vector-related tests
let max_attempts = if url.contains("/tasks/") {
if update_id > 1000 {
400 // 200 seconds for vector tests
} else {
100 // 50 seconds for other tests
}
} else {
100 // 50 seconds for other tests
};
let max_attempts = 400; // 200 seconds total, 0.5s per attempt

for _ in 0..max_attempts {
for i in 0..max_attempts {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);

@@ -420,6 +421,10 @@ impl<State> Server<State> {

// wait 0.5 second.
sleep(Duration::from_millis(500)).await;

if i == max_attempts - 1 {
dbg!(response);
}
}
panic!("Timeout waiting for update id");
}

File diff suppressed because it is too large
@@ -1,39 +1,35 @@
use meili_snap::{json_string, snapshot};

use crate::common::{GetAllDocumentsOptions, Server};
use crate::common::{shared_does_not_exists_index, GetAllDocumentsOptions, Server};
use crate::json;

#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.delete_document(0).await;
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "failed");
index.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
async fn delete_one_unexisting_document() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{}", response);
let update = index.wait_task(response.uid()).await;
assert_eq!(update["status"], "succeeded");
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
}

#[actix_rt::test]
async fn delete_one_document() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, status_code) = server.index("test").delete_document(0).await;
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();

@@ -43,20 +39,18 @@ async fn delete_one_document() {

#[actix_rt::test]
async fn clear_all_documents_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "failed");
index.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
async fn clear_all_documents() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index
.add_documents(
json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]),
@@ -67,7 +61,7 @@ async fn clear_all_documents() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let _update = index.wait_task(task.uid()).await;
let _update = index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@@ -75,14 +69,14 @@ async fn clear_all_documents_empty_index() {

#[actix_rt::test]
async fn clear_all_documents_empty_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);

let _update = index.wait_task(task.uid()).await;
let _update = index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@@ -90,33 +84,31 @@ async fn error_delete_batch_unexisting_index() {

#[actix_rt::test]
async fn error_delete_batch_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.delete_batch(vec![]).await;
let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "failed");
let response = index.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}

#[actix_rt::test]
async fn delete_batch() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);

let _update = index.wait_task(task.uid()).await;
let _update = index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@@ -125,14 +117,14 @@ async fn delete_batch() {

#[actix_rt::test]
async fn delete_no_document_batch() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{}", _response);
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");

let _update = index.wait_task(_response.uid()).await;
let _update = index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@@ -140,8 +132,8 @@ async fn delete_no_document_batch() {

#[actix_rt::test]
async fn delete_document_by_filter() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings_filterable_attributes(json!(["color"])).await;
let (task, _status_code) = index
.add_documents(
@@ -178,22 +170,22 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 2,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);

let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@@ -251,22 +243,22 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter(json!({ "filter": "color NOT EXISTS"})).await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"taskUid": 3,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);

let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@@ -321,8 +313,8 @@ async fn delete_document_by_filter() {

#[actix_rt::test]
async fn delete_document_by_complex_filter() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings_filterable_attributes(json!(["color"])).await;
let (task, _status_code) = index
.add_documents(
@@ -343,22 +335,22 @@ async fn delete_document_by_complex_filter() {
)
.await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 2,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);

let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@@ -402,22 +394,22 @@ async fn delete_document_by_complex_filter() {
.delete_document_by_filter(json!({ "filter": [["color = green", "color NOT EXISTS"]] }))
.await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"taskUid": 3,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);

let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,

@@ -621,7 +621,7 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter_fail(json!({ "filter": "catto = jorts"})).await;
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await.failed();
snapshot!(response, @r###"
{
"uid": "[uid]",
@@ -665,7 +665,7 @@ async fn fetch_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();

let (response, code) = index.fetch_documents(json!(null)).await;
snapshot!(code, @"400 Bad Request");

@@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() {

#[actix_rt::test]
async fn get_document_with_vectors() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({

@@ -6,19 +6,18 @@ use crate::json;

#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
let server = Server::new().await;
let index = server.index("883 fj!");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("883 fj!");
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;

let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});

assert_eq!(code, 400);
assert_eq!(response, expected_response);
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "`883 fj!-[uuid]` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}"###);
}

#[actix_rt::test]

@@ -46,8 +46,10 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let server = Server::new_shared();
let app = server.init_web_app().await;

let index = server.unique_index_with_prefix("test");

let body = serde_json::to_string(&json!({
"uid": "test",
"uid": index.uid.clone(),
"primaryKey": None::<&str>,
}))
.unwrap();
@@ -68,7 +70,7 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let parsed_response =
serde_json::from_slice::<Value>(decoded.into().as_ref()).expect("Expecting valid json");

assert_eq!(parsed_response["indexUid"], "test");
assert_eq!(parsed_response["indexUid"], index.uid);
}

#[actix_rt::test]

@@ -28,6 +28,7 @@ async fn error_delete_unexisting_index() {
let (task, code) = index.delete_index_fail().await;

assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();

let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",
@@ -57,7 +58,7 @@ async fn loop_delete_add_documents() {
}

for task in tasks {
let response = index.wait_task(task).await;
let response = index.wait_task(task).await.succeeded();
assert_eq!(response["status"], "succeeded", "{}", response);
}
}

@@ -52,19 +52,28 @@ async fn no_index_return_empty_list() {

#[actix_rt::test]
async fn list_multiple_indexes() {
let server = Server::new().await;
server.index("test").create(None).await;
let (task, _status_code) = server.index("test1").create(Some("key")).await;
let server = Server::new_shared();

server.index("test").wait_task(task.uid()).await.succeeded();
let index_without_key = server.unique_index();
let (response_without_key, _status_code) = index_without_key.create(None).await;

let (response, code) = server.list_indexes(None, None).await;
let index_with_key = server.unique_index();
let (response_with_key, _status_code) = index_with_key.create(Some("key")).await;

index_without_key.wait_task(response_without_key.uid()).await.succeeded();
index_with_key.wait_task(response_with_key.uid()).await.succeeded();

let (response, code) = server.list_indexes(None, Some(1000)).await;
assert_eq!(code, 200);
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 2);
assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
assert!(arr.len() >= 2, "Expected at least 2 indexes.");
assert!(arr
.iter()
.any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null));
assert!(arr
.iter()
.any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key"));
}

#[actix_rt::test]

@@ -1,10 +1,11 @@
use crate::common::Server;
use crate::common::{shared_does_not_exists_index, Server};

use crate::json;

#[actix_rt::test]
async fn stats() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.create(Some("id")).await;

assert_eq!(code, 202);
@@ -15,7 +16,7 @@ async fn stats() {

assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 0);
assert!(response["isIndexing"] == false);
assert_eq!(response["isIndexing"], false);
assert!(response["fieldDistribution"].as_object().unwrap().is_empty());

let documents = json!([
@@ -31,7 +32,6 @@ async fn stats() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 1);

index.wait_task(response.uid()).await.succeeded();

@@ -39,7 +39,7 @@ async fn stats() {

assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 2);
assert!(response["isIndexing"] == false);
assert_eq!(response["isIndexing"], false);
assert_eq!(response["fieldDistribution"]["id"], 2);
assert_eq!(response["fieldDistribution"]["name"], 1);
assert_eq!(response["fieldDistribution"]["age"], 1);
@@ -47,11 +47,11 @@ async fn stats() {

#[actix_rt::test]
async fn error_get_stats_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.index("test").stats().await;
let index = shared_does_not_exists_index().await;
let (response, code) = index.stats().await;

let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

@@ -2,28 +2,26 @@ use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::encoder::Encoder;
use crate::common::Server;
use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server};
use crate::json;

#[actix_rt::test]
async fn update_primary_key() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.create(None).await;

assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();

let (task, _status_code) = index.update(Some("primary")).await;

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();

let (response, code) = index.get().await;

assert_eq!(code, 200);

assert_eq!(response["uid"], "test");
assert_eq!(response["uid"], index.uid);
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());

@@ -39,24 +37,23 @@ async fn update_primary_key() {

#[actix_rt::test]
async fn create_and_update_with_different_encoding() {
let server = Server::new().await;
let index = server.index_with_encoder("test", Encoder::Gzip);
let (_, code) = index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index_with_encoder(Encoder::Gzip);
let (create_task, code) = index.create(None).await;

assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();

let index = server.index_with_encoder("test", Encoder::Brotli);
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
}

#[actix_rt::test]
async fn update_nothing() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task1, code) = index.create(None).await;

assert_eq!(code, 202);
@@ -67,35 +64,20 @@ async fn update_nothing() {

assert_eq!(code, 202);

let response = index.wait_task(task2.uid()).await;

assert_eq!(response["status"], "succeeded");
index.wait_task(task2.uid()).await.succeeded();
}

#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.create(Some("id")).await;
let index = shared_index_with_documents().await;

let (update_task, code) = index.update_index_fail(Some("primary")).await;

assert_eq!(code, 202);

let documents = json!([
{
"id": "11",
"content": "foobar"
}
]);
index.add_documents(documents, None).await;

let (task, code) = index.update(Some("primary")).await;

assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await;
let response = index.wait_task(update_task.uid()).await.failed();

let expected_response = json!({
"message": "Index `test`: Index already has a primary key: `id`.",
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
"code": "index_primary_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
@@ -106,15 +88,15 @@ async fn error_update_existing_primary_key() {

#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new().await;
let (task, code) = server.index("test").update(None).await;
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;

assert_eq!(code, 202);

let response = server.index("test").wait_task(task.uid()).await;
let response = index.wait_task(task.uid()).await.failed();

let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

@@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str = "product_id";
/// testing: https://github.com/meilisearch/meilisearch/issues/4078
#[actix_rt::test]
async fn distinct_search_with_offset_no_ranking() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(response["estimatedTotalHits"] , @"11");

let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(response["estimatedTotalHits"], @"10");

let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"1");
snapshot!(format!("{:?}", hits), @r#"["345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
}

/// testing: https://github.com/meilisearch/meilisearch/issues/4130
#[actix_rt::test]
async fn distinct_search_with_pagination_no_ranking() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["page"], @"0");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(response["page"], @"3");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["page"], @"4");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"2");
snapshot!(response["totalHits"], @"6");
@@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() {

#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new().await;
let index = server.index("tamo");
let server = Server::new_shared();
let index = server.unique_index();

let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await;
let task = index.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");

fn get_hits(response: &Value) -> Vec<String> {
@@ -299,7 +299,7 @@ async fn distinct_at_search_time() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"1");
snapshot!(response["totalHits"], @"3");

@@ -707,7 +707,7 @@ async fn filter_invalid_attribute_array() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -728,7 +728,7 @@ async fn filter_invalid_attribute_string() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -885,7 +885,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -911,7 +911,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -932,7 +932,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -958,7 +958,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -984,7 +984,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
|
||||
"message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
|
||||
"code": "invalid_search_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
|
||||
@@ -1143,7 +1143,7 @@ async fn search_on_unknown_field() {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
|
||||
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
|
||||
"code": "invalid_search_attributes_to_search_on",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
|
||||
@@ -1164,7 +1164,7 @@ async fn search_on_unknown_field_plus_joker() {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
|
||||
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
|
||||
"code": "invalid_search_attributes_to_search_on",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
|
||||
@@ -1182,7 +1182,7 @@ async fn search_on_unknown_field_plus_joker() {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
|
||||
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
|
||||
"code": "invalid_search_attributes_to_search_on",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
|
||||
@@ -1195,10 +1195,8 @@ async fn search_on_unknown_field_plus_joker() {

#[actix_rt::test]
async fn distinct_at_search_time() {
-let server = Server::new().await;
-let index = server.index("test");
-let (task, _) = index.create(None).await;
-index.wait_task(task.uid()).await.succeeded();
+let server = Server::new_shared();
+let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
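// Migration pattern used throughout this diff: `Server::new_shared()` reuses
// one test server, and `unique_index()` creates an index whose name is a
// generated UUID. That is why the error-message snapshots above now expect
// `Index `[uuid]`` instead of the hard-coded `Index `test``.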
@@ -1208,7 +1206,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
-"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
+"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"

@@ -1223,7 +1221,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
-"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
+"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"

@@ -1238,7 +1236,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
-"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
+"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@@ -50,13 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-assert!(response.is_success(), "{:?}", response);
+index.wait_task(task.uid()).await.succeeded();

let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-assert!(response.is_success(), "{:?}", response);
+index.wait_task(task.uid()).await.succeeded();

let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {

@@ -65,21 +63,18 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
-let response = server.wait_task(task.uid()).await;
-assert!(response.is_success(), "{:?}", response);
+server.wait_task(task.uid()).await.succeeded();

eprintln!("Settings -> Documents -> test");
let index = server.index("test");

let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-assert!(response.is_success(), "{:?}", response);
+index.wait_task(task.uid()).await.succeeded();

let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-assert!(response.is_success(), "{:?}", response);
+index.wait_task(task.uid()).await.succeeded();

let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {

@@ -88,14 +83,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
-let response = server.wait_task(task.uid()).await;
-assert!(response.is_success(), "{:?}", response);
+server.wait_task(task.uid()).await.succeeded();
}
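// `wait_task(..).await.succeeded()` both waits for the task and asserts that
// it finished in the `succeeded` state, replacing the previous two-step
// `wait_task(..)` plus `assert!(response.is_success(), ..)` pattern.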

#[actix_rt::test]
async fn simple_facet_search() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;

@@ -105,20 +99,20 @@ async fn simple_facet_search() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 200, "{}", response);
-assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
+assert_eq!(code, 200, "{response}");
+assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await;

-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}
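// `dbg!` prints the expression to stderr and returns it unchanged, so
// dropping the leftover `dbg!(response)` silences debug noise in test output
// without changing what the assertion checks.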

#[actix_rt::test]
async fn simple_facet_search_on_movies() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = json!([
{

@@ -212,23 +206,23 @@ async fn simple_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
-assert_eq!(202, code, "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!(202, code, "{response:?}");
+index.wait_task(response.uid()).await.succeeded();

let (response, _code) = index.add_documents(documents, None).await;
-index.wait_task(response.uid()).await;
+index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;

-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###);
}

#[actix_rt::test]
async fn advanced_facet_search() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;

@@ -251,8 +245,8 @@ async fn advanced_facet_search() {

#[actix_rt::test]
async fn more_advanced_facet_search() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;

@@ -275,8 +269,8 @@ async fn more_advanced_facet_search() {

#[actix_rt::test]
async fn simple_facet_search_with_max_values() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
@@ -287,14 +281,14 @@ async fn simple_facet_search_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 200, "{}", response);
-assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
+assert_eq!(code, 200, "{response}");
+assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}

#[actix_rt::test]
async fn simple_facet_search_by_count_with_max_values() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index

@@ -309,14 +303,14 @@ async fn simple_facet_search_by_count_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 200, "{}", response);
-assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
+assert_eq!(code, 200, "{response}");
+assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}

#[actix_rt::test]
async fn non_filterable_facet_search_error() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;

@@ -324,17 +318,17 @@ async fn non_filterable_facet_search_error() {

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
-assert_eq!(code, 400, "{}", response);
+assert_eq!(code, 400, "{response}");

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await;
-assert_eq!(code, 400, "{}", response);
+assert_eq!(code, 400, "{response}");
}

#[actix_rt::test]
async fn facet_search_dont_support_words() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;

@@ -344,14 +338,14 @@ async fn facet_search_dont_support_words() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;

-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 0);
}

#[actix_rt::test]
async fn simple_facet_search_with_sort_by_count() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
@@ -362,7 +356,7 @@ async fn simple_facet_search_with_sort_by_count() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
let hits = response["facetHits"].as_array().unwrap();
assert_eq!(hits.len(), 2);
assert_eq!(hits[0], json!({ "value": "Action", "count": 3 }));

@@ -371,25 +365,25 @@ async fn simple_facet_search_with_sort_by_count() {

#[actix_rt::test]
async fn add_documents_and_deactivate_facet_search() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
-index.wait_task(response.uid()).await;
+index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
"filterableAttributes": ["genres"],
}))
.await;
-assert_eq!("202", code.as_str(), "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!("202", code.as_str(), "{response:?}");
+index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 400, "{}", response);
+assert_eq!(code, 400, "{response}");
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@@ -402,8 +396,8 @@ async fn add_documents_and_deactivate_facet_search() {

#[actix_rt::test]
async fn deactivate_facet_search_and_add_documents() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({

@@ -411,16 +405,16 @@ async fn deactivate_facet_search_and_add_documents() {
"filterableAttributes": ["genres"],
}))
.await;
-assert_eq!("202", code.as_str(), "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!("202", code.as_str(), "{response:?}");
+index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
-index.wait_task(response.uid()).await;
+index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 400, "{}", response);
+assert_eq!(code, 400, "{response}");
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",

@@ -433,8 +427,8 @@ async fn deactivate_facet_search_and_add_documents() {

#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({

@@ -442,31 +436,31 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
-assert_eq!("202", code.as_str(), "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!("202", code.as_str(), "{response:?}");
+index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
-index.wait_task(response.uid()).await;
+index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
"facetSearch": true,
}))
.await;
-assert_eq!("202", code.as_str(), "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!("202", code.as_str(), "{response:?}");
+index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 200, "{}", response);
-assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
+assert_eq!(code, 200, "{response}");
+assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({

@@ -474,25 +468,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
-assert_eq!("202", code.as_str(), "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!("202", code.as_str(), "{response:?}");
+index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
-index.wait_task(response.uid()).await;
+index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
"facetSearch": serde_json::Value::Null,
}))
.await;
-assert_eq!("202", code.as_str(), "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!("202", code.as_str(), "{response:?}");
+index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-assert_eq!(code, 200, "{}", response);
-assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
+assert_eq!(code, 200, "{response}");
+assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
@@ -618,8 +612,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() {

#[actix_rt::test]
async fn distinct_facet_search_on_movies() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let documents = json!([
{

@@ -925,26 +919,26 @@ async fn distinct_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
-assert_eq!(202, code, "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!(202, code, "{response:?}");
+index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
-assert_eq!(202, code, "{:?}", response);
-index.wait_task(response.uid()).await;
+assert_eq!(202, code, "{response:?}");
+index.wait_task(response.uid()).await.succeeded();

let (response, _code) = index.add_documents(documents, None).await;
-index.wait_task(response.uid()).await;
+index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;

// non-exhaustive facet count is counting 27 documents with the facet query "blob" but there are only 23 documents with a distinct color.
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###);

let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await;

// exhaustive facet count is counting 23 documents with the facet query "blob" which is the number of distinct colors.
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###);
}
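// As the two comments above note: with a distinct attribute configured, the
// default (non-exhaustive) facet count reports all 27 matching documents,
// while `"exhaustiveFacetCount": true` counts after the distinct pass and
// returns the 23 distinct `color` values.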
@@ -4,23 +4,14 @@ use tempfile::TempDir;

use super::test_settings_documents_indexing_swapping_and_search;
use crate::common::{
-default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS,
+default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
+DOCUMENTS, NESTED_DOCUMENTS,
};
use crate::json;

#[actix_rt::test]
async fn search_with_filter_string_notation() {
-let server = Server::new().await;
-let index = server.index("test");
-
-let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
-meili_snap::snapshot!(code, @"202 Accepted");
-
-let documents = DOCUMENTS.clone();
-let (task, code) = index.add_documents(documents, None).await;
-meili_snap::snapshot!(code, @"202 Accepted");
-let res = index.wait_task(task.uid()).await;
-meili_snap::snapshot!(res["status"], @r###""succeeded""###);
+let index = shared_index_with_documents().await;

index
.search(
@@ -28,44 +19,34 @@ async fn search_with_filter_string_notation() {
"filter": "title = Gläss"
}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
},
)
.await;

-let index = server.index("nested");
+let nested_index = shared_index_with_nested_documents().await;

-let (_, code) =
-index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
-meili_snap::snapshot!(code, @"202 Accepted");
-
-let documents = NESTED_DOCUMENTS.clone();
-let (task, code) = index.add_documents(documents, None).await;
-meili_snap::snapshot!(code, @"202 Accepted");
-let res = index.wait_task(task.uid()).await;
-meili_snap::snapshot!(res["status"], @r###""succeeded""###);
-
-index
+nested_index
.search(
json!({
"filter": "cattos = pésti"
}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["hits"][0]["id"], json!(852));
},
)
.await;

-index
+nested_index
.search(
json!({
"filter": "doggos.age > 5"
}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
assert_eq!(response["hits"][0]["id"], json!(654));
assert_eq!(response["hits"][1]["id"], json!(951));

@@ -82,7 +63,7 @@ async fn search_with_filter_array_notation() {
"filter": ["title = Gläss"]
}))
.await;
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);

let (response, code) = index

@@ -90,7 +71,7 @@ async fn search_with_filter_array_notation() {
"filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]]
}))
.await;
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
}

@@ -116,7 +97,7 @@ async fn search_with_contains_filter() {
"filter": "title CONTAINS cap"
}))
.await;
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
}
@@ -269,16 +250,14 @@ async fn search_with_pattern_filter_settings() {

#[actix_rt::test]
async fn search_with_pattern_filter_settings_scenario_1() {
-let temp = TempDir::new().unwrap();
-let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap();
+let server = Server::new_shared();

eprintln!("Documents -> Settings -> test");
-let index = server.index("test");
+let index = server.unique_index();

let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
-assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-snapshot!(response["status"], @r###""succeeded""###);
+assert_eq!(code, 202, "{task}");
+index.wait_task(task.uid()).await.succeeded();

let (task, code) = index
.update_settings(json!({"filterableAttributes": [{

@@ -289,9 +268,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
-assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-snapshot!(response["status"], @r###""succeeded""###);
+assert_eq!(code, 202, "{task}");
+index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter works
index

@@ -335,7 +313,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
-"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

@@ -355,9 +333,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
-assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-snapshot!(response["status"], @r###""succeeded""###);
+assert_eq!(code, 202, "{task}");
+index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter works
index

@@ -467,9 +444,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
-assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-snapshot!(response["status"], @r###""succeeded""###);
+assert_eq!(code, 202, "{task}");
+index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter returns an error
index

@@ -481,7 +457,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
-"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
+"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

@@ -567,9 +543,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
-assert_eq!(code, 202, "{}", task);
-let response = index.wait_task(task.uid()).await;
-snapshot!(response["status"], @r###""succeeded""###);
+assert_eq!(code, 202, "{task}");
+index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter works
index

@@ -613,7 +588,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
-"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

@@ -720,7 +695,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
-"message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
+"message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

@@ -746,7 +721,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
-"message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
+"message": "Index `[uuid]`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -1,55 +1,13 @@
use meili_snap::{json_string, snapshot};
use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME;
-use once_cell::sync::Lazy;

use super::test_settings_documents_indexing_swapping_and_search;
-use crate::common::{Server, Value};
+use crate::common::shared_index_with_geo_documents;
use crate::json;

-static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
-json!([
-{
-"id": 1,
-"name": "Taco Truck",
-"address": "444 Salsa Street, Burritoville",
-"type": "Mexican",
-"rating": 9,
-"_geo": {
-"lat": 34.0522,
-"lng": -118.2437
-}
-},
-{
-"id": 2,
-"name": "La Bella Italia",
-"address": "456 Elm Street, Townsville",
-"type": "Italian",
-"rating": 9,
-"_geo": {
-"lat": "45.4777599",
-"lng": "9.1967508"
-}
-},
-{
-"id": 3,
-"name": "Crêpe Truck",
-"address": "2 Billig Avenue, Rouenville",
-"type": "French",
-"rating": 10
-}
-])
-});
-
#[actix_rt::test]
async fn geo_sort_with_geo_strings() {
-let server = Server::new().await;
-let index = server.index("test");
-
-let documents = DOCUMENTS.clone();
-index.update_settings_filterable_attributes(json!(["_geo"])).await;
-index.update_settings_sortable_attributes(json!(["_geo"])).await;
-let (task, _status_code) = index.add_documents(documents, None).await;
-index.wait_task(task.uid()).await.succeeded();
+let index = shared_index_with_geo_documents().await;

index
.search(
@@ -58,7 +16,7 @@ async fn geo_sort_with_geo_strings() {
"sort": ["_geoPoint(0.0, 0.0):asc"]
}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
},
)
.await;

@@ -66,14 +24,7 @@ async fn geo_sort_with_geo_strings() {

#[actix_rt::test]
async fn geo_bounding_box_with_string_and_number() {
-let server = Server::new().await;
-let index = server.index("test");
-
-let documents = DOCUMENTS.clone();
-index.update_settings_filterable_attributes(json!(["_geo"])).await;
-index.update_settings_sortable_attributes(json!(["_geo"])).await;
-let (ret, _code) = index.add_documents(documents, None).await;
-index.wait_task(ret.uid()).await.succeeded();
+let index = shared_index_with_geo_documents().await;

index
.search(

@@ -81,7 +32,7 @@ async fn geo_bounding_box_with_string_and_number() {
"filter": "_geoBoundingBox([89, 179], [-89, -179])",
}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

@@ -123,14 +74,7 @@ async fn geo_bounding_box_with_string_and_number() {
#[actix_rt::test]
async fn bug_4640() {
// https://github.com/meilisearch/meilisearch/issues/4640
-let server = Server::new().await;
-let index = server.index("test");
-
-let documents = DOCUMENTS.clone();
-index.add_documents(documents, None).await;
-index.update_settings_filterable_attributes(json!(["_geo"])).await;
-let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
-index.wait_task(ret.uid()).await.succeeded();
+let index = shared_index_with_geo_documents().await;

// Sort the document with the second one first
index

@@ -139,7 +83,7 @@ async fn bug_4640() {
"sort": ["_geoPoint(45.4777599, 9.1967508):asc"],
}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

@@ -202,7 +146,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "jean"}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

@@ -247,7 +191,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "bob"}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

@@ -284,7 +228,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "intel"}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

@@ -324,7 +268,7 @@ async fn geo_sort_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "sort"], "sortableAttributes": [RESERVED_GEO_FIELD_NAME]}),
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@@ -2,31 +2,31 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;

use crate::common::index::Index;
-use crate::common::{Server, Value};
+use crate::common::{Server, Shared, Value};
use crate::json;

async fn index_with_documents_user_provided<'a>(
-server: &'a Server,
+server: &'a Server<Shared>,
documents: &Value,
) -> Index<'a> {
-let index = server.index("test");
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
"source": "userProvided",
"dimensions": 2}}} ))
.await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index.add_documents(documents.clone(), None).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
index
}

-async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
-let index = server.index("test");
+async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({ "embedders": {"default": {

@@ -36,11 +36,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> I
"documentTemplate": "{{doc.title}}, {{doc.desc}}"
}}} ))
.await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index.add_documents(documents.clone(), None).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
index
}
@@ -76,6 +76,48 @@ static SINGLE_DOCUMENT_VEC: Lazy<Value> = Lazy::new(|| {
}])
});

+static TEST_DISTINCT_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+// for query "Captain Marvel" and vector [1.0, 1.0]
+json!([
+{
+"id": 0,
+"search": "Captain Planet",
+"desc": "#2 for keyword search, #3 for hybrid search",
+"_vectors": {
+"default": [-1.0, 0.0],
+},
+"distinct": 0
+},
+{
+"id": 1,
+"search": "Captain Marvel",
+"desc": "#1 for keyword search, #4 for hybrid search",
+"_vectors": {
+"default": [-1.0, -1.0],
+},
+"distinct": 1
+},
+{
+"id": 2,
+"search": "Some Captain at least",
+"desc": "#3 for keyword search, #1 for hybrid search",
+"_vectors": {
+"default": [1.0, 1.0],
+},
+"distinct": 0
+},
+{
+"id": 3,
+"search": "Irrelevant Capitaine",
+"desc": "#4 for keyword search, #2 for hybrid search",
+"_vectors": {
+"default": [1.0, 0.0],
+},
+"distinct": 1
+},
+])
+});
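// The hybrid rankings claimed in `desc` can be sanity-checked against the
// dot product of each document vector with the query vector [1.0, 1.0]
// (cosine similarity gives the same ordering here):
// id 2 -> [1,1]·[1,1] = 2 (#1), id 3 -> 1 (#2), id 0 -> -1 (#3), id 1 -> -2 (#4).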
static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{

@@ -97,8 +139,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(

@@ -130,8 +172,8 @@ async fn simple_search() {

#[actix_rt::test]
async fn limit_offset() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(

@@ -143,8 +185,8 @@ async fn limit_offset() {
snapshot!(response["semanticHitCount"], @"0");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);

-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(

@@ -159,8 +201,8 @@ async fn limit_offset() {

#[actix_rt::test]
async fn simple_search_hf() {
-let server = Server::new().await;
-let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;

let (response, code) = index
.search_post(

@@ -211,8 +253,8 @@ async fn simple_search_hf() {

#[actix_rt::test]
async fn distribution_shift() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true});
let (response, code) = index.search_post(search.clone()).await;

@@ -233,7 +275,7 @@ async fn distribution_shift() {
.await;

snapshot!(code, @"202 Accepted");
-let response = server.wait_task(response.uid()).await;
+let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#);

let (response, code) = index.search_post(search).await;

@@ -243,8 +285,8 @@ async fn distribution_shift() {

#[actix_rt::test]
async fn highlighter() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],

@@ -298,8 +340,8 @@ async fn highlighter() {

#[actix_rt::test]
async fn invalid_semantic_ratio() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(

@@ -370,8 +412,8 @@ async fn invalid_semantic_ratio() {

#[actix_rt::test]
async fn single_document() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SINGLE_DOCUMENT_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SINGLE_DOCUMENT_VEC).await;

let (response, code) = index
.search_post(

@@ -386,8 +428,8 @@ async fn single_document() {

#[actix_rt::test]
async fn query_combination() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

// search without query and vector, but with hybrid => still placeholder
let (response, code) = index

@@ -493,10 +535,54 @@ async fn query_combination() {
snapshot!(response["semanticHitCount"], @"0");
}
+// see <https://github.com/meilisearch/meilisearch/issues/5526>
+#[actix_rt::test]
+async fn distinct_is_applied() {
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &TEST_DISTINCT_DOCUMENTS).await;
+
+let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
+assert_eq!(202, code, "{:?}", response);
+index.wait_task(response.uid()).await.succeeded();
+
+// pure keyword
+let (response, code) = index
+.search_post(
+json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}),
+)
+.await;
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###);
+snapshot!(response["semanticHitCount"], @"null");
+snapshot!(response["estimatedTotalHits"], @"2");
+
+// pure semantic
+let (response, code) = index
+.search_post(
+json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}),
+)
+.await;
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###);
+snapshot!(response["semanticHitCount"], @"2");
+snapshot!(response["estimatedTotalHits"], @"2");
+
+// hybrid
+let (response, code) = index
+.search_post(
+json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}),
+)
+.await;
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###);
+snapshot!(response["semanticHitCount"], @"1");
+snapshot!(response["estimatedTotalHits"], @"2");
+}
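// `semanticRatio` selects the retrieval mix: 0.0 is pure keyword search
// (`semanticHitCount` is null), 1.0 is pure vector search, and 0.5 blends the
// two. In every mode the `distinct` attribute collapses the four documents
// into two hits, one per distinct value.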
#[actix_rt::test]
async fn retrieve_vectors() {
-let server = Server::new().await;
-let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;

let (response, code) = index
.search_post(

@@ -546,7 +632,7 @@ async fn retrieve_vectors() {
let (response, code) = index
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index

@@ -596,7 +682,7 @@ async fn retrieve_vectors() {
// remove `_vectors` from displayed attributes
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
@@ -89,9 +89,9 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(

@@ -147,23 +147,20 @@ async fn simple_search() {
.search(
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
-snapshot!(response, @r###"
+snapshot!(response, @r#"
{
"hits": [
{
"id": 852
-},
-{
-"id": 853
}
],
"query": "進撃",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
-"estimatedTotalHits": 2
+"estimatedTotalHits": 1
}
-"###);
+"#);
snapshot!(code, @"200 OK");
},
)
@@ -172,23 +169,20 @@ async fn simple_search() {
|
||||
// chinese
|
||||
index
|
||||
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
|
||||
snapshot!(response, @r###"
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
"id": 853
|
||||
},
|
||||
{
|
||||
"id": 852
|
||||
}
|
||||
],
|
||||
"query": "进击",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
snapshot!(code, @"200 OK");
|
||||
})
|
||||
.await;
|
||||
@@ -196,9 +190,9 @@ async fn simple_search() {
|
||||
|
||||
#[actix_rt::test]
async fn force_locales() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -211,10 +205,10 @@ async fn force_locales() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -274,9 +268,9 @@ async fn force_locales() {

#[actix_rt::test]
async fn force_locales_with_pattern() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -289,10 +283,10 @@ async fn force_locales_with_pattern() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -352,9 +346,9 @@ async fn force_locales_with_pattern() {

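The json_string! redactions introduced in these hunks replace run-dependent fields (task uids, dates) with stable placeholders so snapshots stay deterministic across runs. A minimal sketch of that idea, assuming serde_json; this is an illustration, not meili_snap's actual implementation.

use serde_json::{json, Value};

// Replace the listed top-level fields, when present, with fixed placeholders.
fn redact(mut value: Value, rules: &[(&str, &str)]) -> Value {
    for (field, placeholder) in rules {
        if value.get(*field).is_some() {
            value[*field] = Value::String((*placeholder).to_string());
        }
    }
    value
}

fn main() {
    let task = json!({"taskUid": 42, "status": "enqueued", "enqueuedAt": "2024-05-01T12:00:00Z"});
    let redacted = redact(task, &[("taskUid", "[task_uid]"), ("enqueuedAt", "[date]")]);
    assert_eq!(redacted, json!({"taskUid": "[task_uid]", "status": "enqueued", "enqueuedAt": "[date]"}));
}
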
#[actix_rt::test]
async fn force_locales_with_pattern_nested() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = NESTED_DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
@@ -365,10 +359,10 @@ async fn force_locales_with_pattern_nested() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -423,9 +417,9 @@ async fn force_locales_with_pattern_nested() {
}
#[actix_rt::test]
async fn force_different_locales_with_pattern() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -440,10 +434,10 @@ async fn force_different_locales_with_pattern() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -499,9 +493,9 @@ async fn force_different_locales_with_pattern() {

#[actix_rt::test]
async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -518,10 +512,10 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -577,9 +571,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {

#[actix_rt::test]
async fn auto_infer_locales_at_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -592,10 +586,10 @@ async fn auto_infer_locales_at_search() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -676,9 +670,9 @@ async fn auto_infer_locales_at_search() {

#[actix_rt::test]
async fn force_different_locales_with_pattern_nested() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = NESTED_DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
@@ -691,10 +685,10 @@ async fn force_different_locales_with_pattern_nested() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -774,9 +768,9 @@ async fn force_different_locales_with_pattern_nested() {

#[actix_rt::test]
async fn settings_change() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = NESTED_DOCUMENTS.clone();
    let (task, _status_code) = index.add_documents(documents, None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -789,10 +783,10 @@ async fn settings_change() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 1,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -852,10 +846,10 @@ async fn settings_change() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 2,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -906,9 +900,9 @@ async fn settings_change() {

#[actix_rt::test]
async fn invalid_locales() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    index
        .update_settings(
@@ -945,9 +939,9 @@ async fn invalid_locales() {

#[actix_rt::test]
async fn invalid_localized_attributes_rules() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let (response, _) = index
        .update_settings(json!({
            "localizedAttributes": [
@@ -1015,19 +1009,19 @@ async fn invalid_localized_attributes_rules() {

#[actix_rt::test]
async fn simple_facet_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
            "filterableAttributes": ["name_en", "name_ja", "name_zh"],
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -1073,9 +1067,9 @@ async fn simple_facet_search() {

#[actix_rt::test]
async fn facet_search_with_localized_attributes() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
@@ -1086,10 +1080,10 @@ async fn facet_search_with_localized_attributes() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -1146,9 +1140,9 @@ async fn facet_search_with_localized_attributes() {

#[actix_rt::test]
async fn swedish_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = json!([
        {"id": "tra1-1", "product": "trä"},
        {"id": "tra2-1", "product": "traktor"},
@@ -1269,9 +1263,9 @@ async fn swedish_search() {

#[actix_rt::test]
async fn german_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = json!([
        {"id": 1, "product": "Interkulturalität"},
        {"id": 2, "product": "Wissensorganisation"},

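These migrations all follow one shape: a single server shared by the whole test process plus a unique index uid per test, so tests can run concurrently without clobbering each other's indexes. A minimal standalone sketch of the pattern; TestServer and its methods are illustrative stand-ins, not the real common::Server API.

use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::OnceLock;

struct TestServer;

impl TestServer {
    // One instance for the whole process, lazily created on first use.
    fn shared() -> &'static TestServer {
        static SHARED: OnceLock<TestServer> = OnceLock::new();
        SHARED.get_or_init(|| TestServer)
    }

    // Each call hands out a fresh uid, so no two tests share an index.
    fn unique_index(&self) -> String {
        static COUNTER: AtomicU64 = AtomicU64::new(0);
        format!("index-{}", COUNTER.fetch_add(1, Ordering::Relaxed))
    }
}

fn main() {
    let server = TestServer::shared();
    assert_ne!(server.unique_index(), server.unique_index());
}
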
@@ -2,11 +2,11 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;

use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;

async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
    let index = server.index("test");
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
    let index = server.unique_index();

    let (task, _status_code) = index.add_documents(documents.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@@ -75,8 +75,8 @@ async fn simple_search() {

#[actix_rt::test]
async fn search_with_typo() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@@ -102,8 +102,8 @@ async fn search_with_typo() {

#[actix_rt::test]
async fn search_with_unknown_word() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -2296,6 +2296,7 @@ async fn error_remote_500_once() {
}

#[actix_rt::test]
#[ignore]
async fn error_remote_timeout() {
    let ms0 = Server::new().await;
    let ms1 = Server::new().await;

@@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert!(response.get("estimatedTotalHits").is_some());
            assert!(response.get("limit").is_some());
            assert!(response.get("offset").is_some());
@@ -25,7 +25,7 @@ async fn simple_search() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"page": 1}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 5);
            assert!(response.get("totalHits").is_some());
            assert_eq!(response["page"], 1);
@@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"page": 0}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            assert!(response.get("totalHits").is_some());
            assert_eq!(response["page"], 0);
@@ -58,7 +58,7 @@ async fn hits_per_page_1() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"hitsPerPage": 1}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 1);
            assert_eq!(response["totalHits"], 5);
            assert_eq!(response["page"], 1);
@@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"hitsPerPage": 0}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            assert_eq!(response["totalHits"], 5);
            assert_eq!(response["page"], 1);
@@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() {
    for page in 0..=4 {
        index
            .search(json!({"page": page, "hitsPerPage": 1}), |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(code, 200, "{response}");
                assert_eq!(response["totalHits"], 4);
                assert_eq!(response["totalPages"], 4);
            })

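The pagination assertions above are consistent with totalPages being the ceiling of totalHits over hitsPerPage (4 hits at 1 per page give 4 pages). A small sketch of that arithmetic; the hitsPerPage == 0 branch is an assumption added for illustration, since these tests only pin down the non-zero cases.

fn total_pages(total_hits: u64, hits_per_page: u64) -> u64 {
    if hits_per_page == 0 {
        // Assumed convention: zero hits per page means no pages to count.
        0
    } else {
        total_hits.div_ceil(hits_per_page) // ceiling division
    }
}

fn main() {
    assert_eq!(total_pages(5, 1), 5);
    assert_eq!(total_pages(4, 1), 4);
    assert_eq!(total_pages(5, 2), 3); // 2 full pages + 1 partial page
}
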
@@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;

use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;

async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
    let index = server.index("test");
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
    let index = server.unique_index();

    let (task, _code) = index.add_documents(documents.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search_on_title() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
@@ -51,8 +51,8 @@ async fn simple_search_on_title() {

#[actix_rt::test]
async fn search_no_searchable_attribute_set() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(
@@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() {

#[actix_rt::test]
async fn search_on_all_attributes() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@@ -106,8 +106,8 @@ async fn search_on_all_attributes() {

#[actix_rt::test]
async fn search_on_all_attributes_restricted_set() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
    index.wait_task(task.uid()).await.succeeded();

@@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() {

#[actix_rt::test]
async fn simple_prefix_search_on_title() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
@@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() {

#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search matching strategy all should only return 1 document (ids: 2).
    index
        .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| {
@@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() {

#[actix_rt::test]
async fn simple_search_on_no_field() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search on no field shouldn't return any document.
    index
        .search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| {
@@ -161,8 +161,8 @@ async fn simple_search_on_no_field() {

#[actix_rt::test]
async fn word_ranking_rule_order() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before document 2.
    index
@@ -189,8 +189,8 @@ async fn word_ranking_rule_order() {

#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let (task, _status_code) = index
        .update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
        .await;
@@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() {

#[actix_rt::test]
async fn typo_ranking_rule_order() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Capitain Marivel",
@@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() {

#[actix_rt::test]
async fn attributes_ranking_rule_order() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Captain Marvel",
@@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() {

#[actix_rt::test]
async fn exactness_ranking_rule_order() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Captain Marvel",
@@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() {

#[actix_rt::test]
async fn search_on_exact_field() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Captain Marvel",
@@ -359,7 +359,7 @@ async fn search_on_exact_field() {

    let (response, code) =
        index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
    assert_eq!(202, code, "{:?}", response);
    assert_eq!(202, code, "{response:?}");
    index.wait_task(response.uid()).await.succeeded();
    // Searching on an exact attribute should only return the document matching without typo.
    index
@@ -372,7 +372,7 @@ async fn search_on_exact_field() {

#[actix_rt::test]
async fn phrase_search_on_title() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let documents = json!([
        { "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" },
        { "id": 5, "desc": "Document Review", "title": "Document Review Attorney" },
@@ -383,7 +383,7 @@ async fn phrase_search_on_title() {
        { "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" },
        { "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" }
    ]);
    let index = index_with_documents(&server, &documents).await;
    let index = index_with_documents(server, &documents).await;

    index
        .search(
@@ -416,3 +416,381 @@ async fn phrase_search_on_title() {
        )
        .await;
}

static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "details": {
                "title": "Shazam!",
                "desc": "a Captain Marvel ersatz",
                "weaknesses": ["magic", "requires transformation"],
                "outfit": {
                    "has_cape": true,
                    "colors": {
                        "primary": "red",
                        "secondary": "gold"
                    }
                }
            },
            "id": "1",
        },
        {
            "details": {
                "title": "Captain Planet",
                "desc": "He's not part of the Marvel Cinematic Universe",
                "blue_skin": true,
                "outfit": {
                    "has_cape": false
                }
            },
            "id": "2",
        },
        {
            "details": {
                "title": "Captain Marvel",
                "desc": "a Shazam ersatz",
                "weaknesses": ["magic", "power instability"],
                "outfit": {
                    "has_cape": false
                }
            },
            "id": "3",
        }])
});

#[actix_rt::test]
async fn nested_search_on_title_with_prefix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Wildcard should match to 'details.' attribute
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_search_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Wildcard should match to any attribute inside 'details.'
    // It's worth noting the difference between 'details.*' and '*.title'
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;

    // Should return 1 document (ids: 1)
    index
        .search(
            json!({"q": "gold", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "1"
                  }
                ]"###);
            },
        )
        .await;

    // Should return 2 documents (ids: 1 and 2)
    index
        .search(
            json!({"q": "true", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

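The tests above distinguish a prefix wildcard like "*.title" (any parent path, fixed leaf) from a suffix wildcard like "details.*" (fixed prefix, any nested field). A minimal sketch of how such patterns can be matched against flattened field paths; illustrative only, not milli's actual matcher.

// Match one attributesToSearchOn pattern against a dotted field path.
fn matches(pattern: &str, field_path: &str) -> bool {
    if pattern == "*" {
        true
    } else if let Some(leaf) = pattern.strip_prefix("*.") {
        field_path == leaf || field_path.ends_with(&format!(".{leaf}"))
    } else if let Some(prefix) = pattern.strip_suffix(".*") {
        field_path == prefix || field_path.starts_with(&format!("{prefix}."))
    } else {
        field_path == pattern
    }
}

fn main() {
    assert!(matches("*.title", "details.title"));
    assert!(!matches("*.title", "details.desc"));
    assert!(matches("details.*", "details.outfit.colors.primary"));
    assert!(!matches("details.*", "id"));
}
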
#[actix_rt::test]
async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
    let (task, _status_code) =
        index.update_settings_searchable_attributes(json!(["details.title"])).await;
    index.wait_task(task.uid()).await.succeeded();

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(response["hits"].as_array().unwrap().len(), @"0");
            },
        )
        .await;

    let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
    index.wait_task(task.uid()).await.succeeded();

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(response["hits"].as_array().unwrap().len(), @"0");
            },
        )
        .await;

    let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
    index.wait_task(task.uid()).await.succeeded();

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_prefix_search_on_title_with_prefix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3).
    index
        .search(
            json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_prefix_search_on_details_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    index
        .search(
            json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3)
    index
        .search(
            json!({"q": "mag", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "1"
                  },
                  {
                    "id": "3"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_search_on_title_matching_strategy_all() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Nested search matching strategy all should only return 1 document (ids: 3)
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all", "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before documents 1 and 2
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.desc", "*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]
                "###
                );
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before documents 1 and 2
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]
                "###
                );
            },
        )
        .await;
}

@@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs
{
  "uid": "[uid]",
  "batchUid": "[batch_uid]",
  "indexUid": "tamo",
  "indexUid": "[uuid]",
  "status": "succeeded",
  "type": "settingsUpdate",
  "canceledBy": null,

@@ -3,8 +3,8 @@ use crate::json;

#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
    index.wait_task(task1.uid()).await.succeeded();
@@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() {

#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
    index.wait_task(update_task1.uid()).await.succeeded();

@@ -11,59 +11,62 @@ macro_rules! test_setting_routes {

#[actix_rt::test]
async fn get_unexisting_index() {
    let server = Server::new().await;
    let url = format!("/indexes/test/settings/{}",
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (_response, code) = server.service.get(url).await;
    assert_eq!(code, 404);
}

#[actix_rt::test]
async fn update_unexisting_index() {
    let server = Server::new().await;
    let url = format!("/indexes/test/settings/{}",
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
    assert_eq!(code, 202, "{}", response);
    server.index("").wait_task(0).await;
    let (response, code) = server.index("test").get().await;
    assert_eq!(code, 200, "{}", response);
}

#[actix_rt::test]
async fn delete_unexisting_index() {
    let server = Server::new().await;
    let url = format!("/indexes/test/settings/{}",
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (_, code) = server.service.delete(url).await;
    assert_eq!(code, 202);
    let response = server.index("").wait_task(0).await;
    assert_eq!(response["status"], "failed");
}

#[actix_rt::test]
async fn get_default() {
    let server = Server::new().await;
    let index = server.index("test");
    let (response, code) = index.create(None).await;
    assert_eq!(code, 202, "{}", response);
    index.wait_task(0).await;
    let url = format!("/indexes/test/settings/{}",
    let server = Server::new_shared();
    let index_name = uuid::Uuid::new_v4().to_string();
    let url = format!("/indexes/{index_name}/settings/{}",
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (response, code) = server.service.get(url).await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(code, 404, "{response}");
}

#[actix_rt::test]
async fn update_unexisting_index() {
    let server = Server::new_shared();
    let index_name = uuid::Uuid::new_v4().to_string();
    let url = format!("/indexes/{index_name}/settings/{}",
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
    assert_eq!(code, 202, "{response}");
    let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
    assert_eq!(code, 404, "{response}");
}

#[actix_rt::test]
async fn delete_unexisting_index() {
    let server = Server::new_shared();
    let index_name = uuid::Uuid::new_v4().to_string();
    let url = format!("/indexes/{index_name}/settings/{}",
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (response, code) = server.service.delete(url).await;
    assert_eq!(code, 202, "{response}");
    let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
    assert_eq!(code, 404, "{response}");
}

#[actix_rt::test]
async fn get_default() {
    let server = Server::new_shared();
    let index = server.unique_index();
    let (response, code) = index.create(None).await;
    assert_eq!(code, 202, "{response}");
    index.wait_task(response.uid()).await.succeeded();
    let url = format!("/indexes/{}/settings/{}",
        index.uid,
        stringify!($setting)
            .chars()
            .map(|c| if c == '_' { '-' } else { c })
            .collect::<String>());
    let (response, code) = server.service.get(url).await;
    assert_eq!(code, 200, "{response}");
    let expected = crate::json!($default_value);
    assert_eq!(expected, response);
}
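The macro derives each settings route by kebab-casing the setting identifier, exactly as the stringify!($setting) chain above does; the same transformation as a standalone sketch (fixed "test" uid used only for illustration).

fn route_for(setting: &str) -> String {
    // snake_case Rust identifier -> kebab-case URL segment.
    let kebab: String = setting.chars().map(|c| if c == '_' { '-' } else { c }).collect();
    format!("/indexes/test/settings/{kebab}")
}

fn main() {
    assert_eq!(route_for("distinct_attribute"), "/indexes/test/settings/distinct-attribute");
    assert_eq!(route_for("ranking_rules"), "/indexes/test/settings/ranking-rules");
}
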
@@ -195,15 +198,16 @@ test_setting_routes!(

#[actix_rt::test]
async fn get_settings_unexisting_index() {
    let server = Server::new().await;
    let (response, code) = server.index("test").settings().await;
    assert_eq!(code, 404, "{}", response)
    let server = Server::new_shared();
    let index = server.unique_index();
    let (response, code) = index.settings().await;
    assert_eq!(code, 404, "{response}")
}

#[actix_rt::test]
async fn get_settings() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (response, _code) = index.create(None).await;
    index.wait_task(response.uid()).await.succeeded();
    let (response, code) = index.settings().await;
@@ -247,9 +251,8 @@ async fn get_settings() {

#[actix_rt::test]
async fn secrets_are_hidden_in_settings() {
    let server = Server::new().await;

    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (response, _code) = index.create(None).await;
    index.wait_task(response.uid()).await.succeeded();

@@ -269,11 +272,11 @@ async fn secrets_are_hidden_in_settings() {
        .await;
    meili_snap::snapshot!(code, @"202 Accepted");

    meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
    meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
    @r###"
    {
      "taskUid": 1,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -282,7 +285,7 @@ async fn secrets_are_hidden_in_settings() {

    let settings_update_uid = response.uid();

    index.wait_task(settings_update_uid).await;
    index.wait_task(settings_update_uid).await.succeeded();

    let (response, code) = index.settings().await;
    meili_snap::snapshot!(code, @"200 OK");
@@ -370,16 +373,16 @@ async fn secrets_are_hidden_in_settings() {

#[actix_rt::test]
async fn error_update_settings_unknown_field() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (_response, code) = index.update_settings(json!({"foo": 12})).await;
    assert_eq!(code, 400);
}

#[actix_rt::test]
async fn test_partial_update() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
    index.wait_task(task.uid()).await.succeeded();
    let (response, code) = index.settings().await;
@@ -398,20 +401,18 @@ async fn test_partial_update() {

#[actix_rt::test]
async fn error_delete_settings_unexisting_index() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, code) = index.delete_settings().await;
    assert_eq!(code, 202);

    let response = index.wait_task(task.uid()).await;

    assert_eq!(response["status"], "failed");
    index.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
async fn reset_all_settings() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let documents = json!([
        {
@@ -423,7 +424,6 @@ async fn reset_all_settings() {

    let (response, code) = index.add_documents(documents, None).await;
    assert_eq!(code, 202);
    assert_eq!(response["taskUid"], 0);
    index.wait_task(response.uid()).await.succeeded();

    let (update_task,_status_code) = index
@@ -456,17 +456,15 @@ async fn reset_all_settings() {

#[actix_rt::test]
async fn update_setting_unexisting_index() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, code) = index.update_settings(json!({})).await;
    assert_eq!(code, 202);
    let response = index.wait_task(task.uid()).await;
    assert_eq!(response["status"], "succeeded");
    index.wait_task(task.uid()).await.succeeded();
    let (_response, code) = index.get().await;
    assert_eq!(code, 200);
    let (task, _status_code) = index.delete_settings().await;
    let response = index.wait_task(task.uid()).await;
    assert_eq!(response["status"], "succeeded");
    index.wait_task(task.uid()).await.succeeded();
}

#[actix_rt::test]
@@ -487,8 +485,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() {

#[actix_rt::test]
async fn error_set_invalid_ranking_rules() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    index.create(None).await;

    let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
@@ -505,8 +503,8 @@ async fn error_set_invalid_ranking_rules() {

#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _code) = index.update_distinct_attribute(json!("test")).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -526,8 +524,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {

#[actix_rt::test]
async fn granular_filterable_attributes() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    index.create(None).await;

    let (response, code) =
@@ -545,7 +543,7 @@ async fn granular_filterable_attributes() {
    index.wait_task(response.uid()).await.succeeded();

    let (response, code) = index.settings().await;
    assert_eq!(code, 200, "{}", response);
    assert_eq!(code, 200, "{response}");
    snapshot!(json_string!(response["filterableAttributes"]), @r###"
    [
      {

@@ -26,11 +26,11 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {

#[actix_rt::test]
async fn add_docs_and_disable() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index_with_prefix("test");

    let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(response.uid()).await;
    server.wait_task(response.uid()).await.succeeded();

    let (response, code) = index
        .update_settings(json!({
@@ -38,8 +38,8 @@ async fn add_docs_and_disable() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    // only 1 document should match
    index
@@ -86,8 +86,8 @@ async fn add_docs_and_disable() {

#[actix_rt::test]
async fn disable_and_add_docs() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index_with_prefix("test");

    let (response, code) = index
        .update_settings(json!({
@@ -95,11 +95,11 @@ async fn disable_and_add_docs() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(response.uid()).await;
    server.wait_task(response.uid()).await.succeeded();

    // only 1 document should match
    index
@@ -145,8 +145,8 @@ async fn disable_and_add_docs() {

#[actix_rt::test]
async fn disable_add_docs_and_enable() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index_with_prefix("test");

    let (response, code) = index
        .update_settings(json!({
@@ -154,11 +154,11 @@ async fn disable_add_docs_and_enable() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(response.uid()).await;
    server.wait_task(response.uid()).await.succeeded();

    let (response, code) = index
        .update_settings(json!({
@@ -166,8 +166,8 @@ async fn disable_add_docs_and_enable() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(2).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    // all documents should match
    index
@@ -253,8 +253,8 @@ async fn disable_add_docs_and_enable() {

#[actix_rt::test]
async fn disable_add_docs_and_reset() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index_with_prefix("test");

    let (response, code) = index
        .update_settings(json!({
@@ -262,11 +262,11 @@ async fn disable_add_docs_and_reset() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(response.uid()).await;
    server.wait_task(response.uid()).await.succeeded();

    let (response, code) = index
        .update_settings(json!({
@@ -274,8 +274,8 @@ async fn disable_add_docs_and_reset() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(2).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    // all documents should match
    index
@@ -361,19 +361,19 @@ async fn disable_add_docs_and_reset() {

#[actix_rt::test]
async fn default_behavior() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index_with_prefix("test");

    let (response, code) = index
        .update_settings(json!({
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    server.wait_task(response.uid()).await.succeeded();

    let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(response.uid()).await;
    server.wait_task(response.uid()).await.succeeded();

    // all documents should match
    index

@@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {

#[actix_rt::test]
async fn attribute_scale_search() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -38,7 +38,7 @@ async fn attribute_scale_search() {
            "rankingRules": ["words", "typo", "proximity"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    assert_eq!("202", code.as_str(), "{response:?}");
    index.wait_task(response.uid()).await.succeeded();

    // the expected order is [1, 3, 2] instead of [3, 1, 2]
@@ -99,8 +99,8 @@ async fn attribute_scale_search() {

#[actix_rt::test]
async fn attribute_scale_phrase_search() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -167,8 +167,8 @@ async fn attribute_scale_phrase_search() {

#[actix_rt::test]
async fn word_scale_set_and_reset() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -282,8 +282,8 @@ async fn word_scale_set_and_reset() {

#[actix_rt::test]
async fn attribute_scale_default_ranking_rules() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() {
        "proximityPrecision": "byAttribute"
    }))
    .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    assert_eq!("202", code.as_str(), "{response:?}");
    index.wait_task(response.uid()).await.succeeded();

    // the expected order is [3, 1, 2]

@@ -5,8 +5,8 @@ use crate::json;

#[actix_rt::test]
async fn set_and_reset() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _code) = index
        .update_settings(json!({
@@ -70,8 +70,8 @@ async fn set_and_search() {
        },
    ]);

    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (add_task, _status_code) = index.add_documents(documents, None).await;
    index.wait_task(add_task.uid()).await.succeeded();
@@ -224,8 +224,8 @@ async fn advanced_synergies() {
        },
    ]);

    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (add_task, _status_code) = index.add_documents(documents, None).await;
    index.wait_task(add_task.uid()).await.succeeded();

@@ -6,11 +6,11 @@ use crate::json;
 
 #[actix_rt::test]
 async fn similar_unexisting_index() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let expected_response = json!({
-        "message": "Index `test` not found.",
+        "message": format!("Index `{}` not found.", index.uid),
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
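With unique indexes the uid is no longer the literal "test", so hard-coded expected error messages have to be built at runtime, as the hunk above does with `format!` and `index.uid`. A minimal sketch of the same move (the uid value here is a hypothetical stand-in):

    use serde_json::json;

    fn main() {
        let index_uid = "movies-4f1a"; // whatever unique_index() produced
        let expected = json!({
            "message": format!("Index `{index_uid}` not found."),
            "code": "index_not_found",
        });
        assert_eq!(expected["message"], "Index `movies-4f1a` not found.");
    }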
@@ -26,12 +26,12 @@ async fn similar_unexisting_index() {
 
 #[actix_rt::test]
 async fn similar_unexisting_parameter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     index
         .similar(json!({"id": 287947, "marin": "hello"}), |response, code| {
-            assert_eq!(code, 400, "{}", response);
+            assert_eq!(code, 400, "{response}");
             assert_eq!(response["code"], "bad_request");
         })
         .await;
@@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() {
 
 #[actix_rt::test]
 async fn similar_bad_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -53,7 +53,7 @@ async fn similar_bad_id() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await;
     snapshot!(code, @"400 Bad Request");
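Another recurring change: a bare `wait_task(...).await` only waits for the task to reach a terminal state, while the chained `.succeeded()` additionally asserts that state. A hedged sketch of what such a helper can look like; the diff only shows the call sites, so the wrapper type below is an assumption:

    use serde_json::Value;

    struct TaskView(Value); // hypothetical wrapper around the task JSON

    impl TaskView {
        #[track_caller]
        fn succeeded(self) -> Self {
            // Fail the test loudly if the task ended in any other state.
            assert_eq!(self.0["status"], "succeeded", "task failed: {:?}", self.0);
            self
        }
    }

Without the assertion, a settings update that silently fails would let the rest of the test run against the wrong index configuration and produce a confusing failure later.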
@@ -69,8 +69,8 @@ async fn similar_bad_id() {
 
 #[actix_rt::test]
 async fn similar_bad_ranking_score_threshold() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await;
     snapshot!(code, @"400 Bad Request");
@@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() {
 
 #[actix_rt::test]
 async fn similar_invalid_ranking_score_threshold() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await;
     snapshot!(code, @"400 Bad Request");
@@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() {
 
 #[actix_rt::test]
 async fn similar_invalid_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -143,7 +143,7 @@ async fn similar_invalid_id() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await;
@@ -160,8 +160,8 @@ async fn similar_invalid_id() {
 
 #[actix_rt::test]
 async fn similar_not_found_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -174,7 +174,7 @@ async fn similar_not_found_id() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await;
@@ -191,8 +191,8 @@ async fn similar_not_found_id() {
 
 #[actix_rt::test]
 async fn similar_bad_offset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -205,7 +205,7 @@ async fn similar_bad_offset() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await;
@@ -233,8 +233,8 @@ async fn similar_bad_offset() {
 
 #[actix_rt::test]
 async fn similar_bad_limit() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -247,7 +247,7 @@ async fn similar_bad_limit() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await;
@@ -277,8 +277,8 @@ async fn similar_bad_limit() {
 async fn similar_bad_filter() {
     // Since a filter is deserialized as a json Value it will never fail to deserialize.
     // Thus the error message is not generated by deserr but written by us.
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -291,7 +291,7 @@ async fn similar_bad_filter() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     snapshot!(code, @"202 Accepted");
 
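The comment in the hunk above carries the key point: a field deserialized as an arbitrary JSON value can never fail deserialization, so the rejection has to happen in a later, hand-written validation step. A self-contained sketch of that two-phase shape (the struct and rule below are illustrative assumptions, not meilisearch's real types):

    use serde::Deserialize;
    use serde_json::Value;

    #[derive(Deserialize)]
    struct SimilarQuery {
        // Accepts any JSON: deserr/serde will never reject it here.
        filter: Option<Value>,
    }

    // The actual rejection is written by hand, after deserialization.
    fn validate(q: &SimilarQuery) -> Result<(), String> {
        match &q.filter {
            None | Some(Value::String(_)) | Some(Value::Array(_)) => Ok(()),
            Some(other) => Err(format!("invalid filter: {other}")),
        }
    }

    fn main() {
        let q: SimilarQuery = serde_json::from_str(r#"{ "filter": true }"#).unwrap();
        assert!(validate(&q).is_err()); // rejected only by the manual check
    }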
@@ -316,8 +316,8 @@ async fn similar_bad_filter() {
 
 #[actix_rt::test]
 async fn filter_invalid_syntax_object() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() {
 
 #[actix_rt::test]
 async fn filter_invalid_syntax_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() {
 
 #[actix_rt::test]
 async fn filter_invalid_syntax_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() {
 
 #[actix_rt::test]
 async fn filter_invalid_attribute_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() {
 
 #[actix_rt::test]
 async fn filter_invalid_attribute_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_attribute_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_attribute_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() {
 
 #[actix_rt::test]
 async fn filter_reserved_attribute_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() {
 
 #[actix_rt::test]
 async fn filter_reserved_attribute_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_point_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_point_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() {
 
 #[actix_rt::test]
 async fn similar_bad_retrieve_vectors() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) =
         index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await;
@@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() {
 
 #[actix_rt::test]
 async fn similar_bad_embedder() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -820,7 +820,7 @@ async fn similar_bad_embedder() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -47,8 +47,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
 
 #[actix_rt::test]
 async fn basic() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index_with_prefix("test");
 
     let (response, code) = index
         .update_settings(json!({
@@ -61,12 +61,12 @@ async fn basic() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
     snapshot!(code, @"202 Accepted");
-    index.wait_task(value.uid()).await.succeeded();
+    server.wait_task(value.uid()).await.succeeded();
 
     index
         .similar(
@@ -233,8 +233,8 @@ async fn basic() {
 
 #[actix_rt::test]
 async fn ranking_score_threshold() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index_with_prefix("test");
 
     let (response, code) = index
         .update_settings(json!({
@@ -247,12 +247,12 @@ async fn ranking_score_threshold() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
     snapshot!(code, @"202 Accepted");
-    index.wait_task(value.uid()).await.succeeded();
+    server.wait_task(value.uid()).await.succeeded();
 
     index
         .similar(
@@ -503,8 +503,8 @@ async fn ranking_score_threshold() {
 
 #[actix_rt::test]
 async fn filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index_with_prefix("test");
 
     let (response, code) = index
         .update_settings(json!({
@@ -517,12 +517,12 @@ async fn filter() {
         "filterableAttributes": ["title", "release_year"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
     snapshot!(code, @"202 Accepted");
-    index.wait_task(value.uid()).await.succeeded();
+    server.wait_task(value.uid()).await.succeeded();
 
     index
         .similar(
@@ -621,8 +621,8 @@ async fn filter() {
 
 #[actix_rt::test]
 async fn limit_and_offset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index_with_prefix("test");
 
     let (response, code) = index
         .update_settings(json!({
@@ -635,12 +635,12 @@ async fn limit_and_offset() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
     snapshot!(code, @"202 Accepted");
-    index.wait_task(value.uid()).await.succeeded();
+    server.wait_task(value.uid()).await.succeeded();
 
     index
         .similar(
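These tests rely heavily on `snapshot!(code, @"202 Accepted")`-style assertions. The `@"..."` argument is an inline snapshot: the expected value is stored in the source file itself rather than in a separate snapshot file. meili_snap wraps insta here; with plain insta the equivalent is roughly:

    // Minimal sketch using the insta crate's inline-snapshot syntax.
    #[test]
    fn inline_snapshot_demo() {
        insta::assert_snapshot!(format!("{} {}", 202, "Accepted"), @"202 Accepted");
    }

When the value changes, `cargo insta review` can rewrite the inline literal in place, which keeps assertion maintenance cheap across a diff as large as this one.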
@@ -6,8 +6,8 @@ use crate::common::Server;
 use crate::json;
 
 #[actix_rt::test]
-async fn get_settings_unexisting_index() {
-    let server = Server::new().await;
+async fn get_version() {
+    let server = Server::new_shared();
     let (response, code) = server.version().await;
     assert_eq!(code, 200);
     let version = response.as_object().unwrap();
@@ -18,7 +18,7 @@ async fn get_settings_unexisting_index() {
 
 #[actix_rt::test]
 async fn test_healthyness() {
-    let server = Server::new().await;
+    let server = Server::new_shared();
 
     let (response, status_code) = server.service.get("/health").await;
     assert_eq!(status_code, 200);
@@ -55,7 +55,7 @@ async fn stats() {
     ]);
 
     let (response, code) = index.add_documents(documents, None).await;
-    assert_eq!(code, 202, "{}", response);
+    assert_eq!(code, 202, "{response}");
     assert_eq!(response["taskUid"], 1);
 
     index.wait_task(response.uid()).await.succeeded();
@@ -78,8 +78,8 @@ async fn stats() {
 
 #[actix_rt::test]
 async fn add_remove_embeddings() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -216,8 +216,8 @@ async fn add_remove_embeddings() {
 
 #[actix_rt::test]
 async fn add_remove_embedded_documents() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -293,8 +293,8 @@ async fn add_remove_embedded_documents() {
 
 #[actix_rt::test]
 async fn update_embedder_settings() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     // 2 embedded documents for 3 embeddings in total
     // but no embedders are added in the settings yet so we expect 0 embedded documents for 0 embeddings in total
@@ -1,8 +1,7 @@
 mod errors;
 mod webhook;
 
-use meili_snap::insta::assert_json_snapshot;
-use meili_snap::snapshot;
+use meili_snap::{json_string, snapshot};
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
@@ -11,14 +10,12 @@ use crate::json;
 
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (response, code) = index.get_task(1).await;
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (response, code) = index.get_task(u32::MAX as u64).await;
 
     let expected_response = json!({
-        "message": "Task `1` not found.",
+        "message": "Task `4294967295` not found.",
        "code": "task_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#task_not_found"
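On a shared server, task uid 1 almost certainly exists because other tests have already enqueued work, so the test now probes an id that cannot plausibly have been assigned. The `4294967295` in the expected message is simply `u32::MAX` widened to `u64`:

    fn main() {
        let id = u32::MAX as u64;
        assert_eq!(id, 4294967295);
        assert_eq!(
            format!("Task `{id}` not found."),
            "Task `4294967295` not found."
        );
    }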
@@ -30,8 +27,8 @@ async fn error_get_unexisting_task_status() {
 
 #[actix_rt::test]
 async fn get_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (create_task, _status_code) = index.create(None).await;
     let (add_task, _status_code) = index
         .add_documents(
@@ -42,7 +39,7 @@ async fn get_task_status() {
             None,
         )
         .await;
-    index.wait_task(create_task.uid()).await.succeeded();
+    server.wait_task(create_task.uid()).await.succeeded();
     let (_response, code) = index.get_task(add_task.uid()).await;
     assert_eq!(code, 200);
     // TODO check response format, as per #48
@@ -50,10 +47,11 @@ async fn get_task_status() {
 
 #[actix_rt::test]
 async fn list_tasks() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
@@ -64,6 +62,7 @@ async fn list_tasks() {
 
 #[actix_rt::test]
 async fn list_tasks_pagination_and_reverse() {
+    // do not use a shared server here, as we want to assert tasks ids and we need them to be stable
     let server = Server::new().await;
     // First of all we want to create a lot of tasks very quickly. The fastest way is to delete a lot of unexisting indexes
     let mut last_task = None;
@@ -71,7 +70,7 @@ async fn list_tasks_pagination_and_reverse() {
         let index = server.index(format!("test-{i}"));
         last_task = Some(index.create(None).await.0.uid());
     }
-    server.wait_task(last_task.unwrap()).await;
+    server.wait_task(last_task.unwrap()).await.succeeded();
 
     let (response, code) = server.tasks_filter("limit=3").await;
     assert_eq!(code, 200);
@@ -103,13 +102,14 @@ async fn list_tasks_pagination_and_reverse() {
 #[actix_rt::test]
 async fn list_tasks_with_star_filters() {
     let server = Server::new().await;
+    // Do not use a unique index here, as we want to test the `indexUids=*` filter.
     let index = server.index("test");
     let (task, _code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
-    let (response, code) = index.service.get("/tasks?indexUids=test").await;
+    let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await;
     assert_eq!(code, 200);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
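The comments added above document the split this diff settles on: tests that assert against the global task list (absolute counts, stable task uids) keep a dedicated `Server::new().await`, while everything else moves to the shared server and scopes its queries by index uid. A trivial sketch of the uid-scoped query that makes the shared case safe (the uid value is a hypothetical example):

    fn tasks_url_for(index_uid: &str) -> String {
        // Scoping by index uid keeps result counts stable even when other
        // tests are writing to the same server's task queue concurrently.
        format!("/tasks?indexUids={index_uid}")
    }

    fn main() {
        assert_eq!(tasks_url_for("movies-4f1a"), "/tasks?indexUids=movies-4f1a");
    }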
@@ -127,93 +127,102 @@ async fn list_tasks_with_star_filters() {
 
     let (response, code) =
         index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index
         .service
-        .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test")
+        .get(format!(
+            "/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}",
+            index.uid
+        ))
         .await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index
         .service
         .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*")
         .await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_status_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_type_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) =
         index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_invalid_canceled_by_filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    index
+    server.wait_task(task.uid()).await.succeeded();
+
+    let (task, _code) = index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
+    server.wait_task(task.uid()).await.succeeded();
 
-    let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
-    assert_eq!(code, 200, "{}", response);
+    let (response, code) =
+        index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 }
 
 #[actix_rt::test]
 async fn list_tasks_status_and_type_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 
     let (response, code) = index
@@ -223,12 +232,12 @@ async fn list_tasks_status_and_type_filtered() {
             &[],
         )
         .await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 macro_rules! assert_valid_summarized_task {
-    ($response:expr, $task_type:literal, $index:literal) => {{
+    ($response:expr, $task_type:literal, $index:tt) => {{
         assert_eq!($response.as_object().unwrap().len(), 5);
         assert!($response["taskUid"].as_u64().is_some());
         assert_eq!($response["indexUid"], $index);
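The macro change above is small but necessary: a `$index:literal` fragment only matches literal tokens such as `"test"`, so it cannot accept the `index_uid` variable the shared-server version needs to pass. Switching the matcher to `$index:tt` accepts any single token tree, which covers both a string literal and an identifier. A minimal sketch of the difference:

    macro_rules! takes_tt {
        ($x:tt) => {
            println!("{}", $x)
        };
    }

    fn main() {
        let uid = String::from("movies-4f1a"); // hypothetical unique uid
        takes_tt!("test"); // a literal still matches
        takes_tt!(uid);    // a single-token identifier now matches too
    }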
@@ -242,49 +251,49 @@ macro_rules! assert_valid_summarized_task {
 
 #[actix_web::test]
 async fn test_summarized_task_view() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let index_uid = index.uid.clone();
 
     let (response, _) = index.create(None).await;
-    assert_valid_summarized_task!(response, "indexCreation", "test");
+    assert_valid_summarized_task!(response, "indexCreation", index_uid);
 
     let (response, _) = index.update(None).await;
-    assert_valid_summarized_task!(response, "indexUpdate", "test");
+    assert_valid_summarized_task!(response, "indexUpdate", index_uid);
 
     let (response, _) = index.update_settings(json!({})).await;
-    assert_valid_summarized_task!(response, "settingsUpdate", "test");
+    assert_valid_summarized_task!(response, "settingsUpdate", index_uid);
 
     let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
 
     let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
 
     let (response, _) = index.delete_document(1).await;
-    assert_valid_summarized_task!(response, "documentDeletion", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", index_uid);
 
     let (response, _) = index.clear_all_documents().await;
-    assert_valid_summarized_task!(response, "documentDeletion", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", index_uid);
 
     let (response, _) = index.delete().await;
-    assert_valid_summarized_task!(response, "indexDeletion", "test");
+    assert_valid_summarized_task!(response, "indexDeletion", index_uid);
 }
 
 #[actix_web::test]
 async fn test_summarized_document_addition_or_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.get_task(0).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    server.wait_task(task.uid()).await.succeeded();
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
         @r###"
     {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
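From here on the snapshots stop pinning concrete values (`"uid": 0`, `"indexUid": "test"`) and compare against placeholders like `"[uid]"` and `"[uuid]"` instead, because on a shared server task uids and index uids are no longer deterministic. This is the snapshot-redaction technique: volatile fields are rewritten before comparison. With plain insta (which meili_snap wraps) it looks roughly like the sketch below; the field names are illustrative and the `redactions` and `json` cargo features of insta are assumed:

    use insta::assert_json_snapshot;
    use serde_json::json;

    #[test]
    fn redaction_demo() {
        let task = json!({ "uid": 17, "indexUid": "movies-4f1a", "status": "succeeded" });
        // The selectors rewrite volatile fields to stable placeholders
        // before the value is compared to the stored snapshot.
        assert_json_snapshot!(task, {
            ".uid" => "[uid]",
            ".indexUid" => "[uuid]",
        });
    }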
@@ -302,15 +311,14 @@ async fn test_summarized_document_addition_or_update() {
 
     let (task, _status_code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.get_task(1).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    server.wait_task(task.uid()).await.succeeded();
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
         @r###"
     {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
@@ -329,18 +337,22 @@ async fn test_summarized_document_addition_or_update() {
 
 #[actix_web::test]
 async fn test_summarized_delete_documents_by_batch() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
-    index.wait_task(task.uid()).await.failed();
-    let (task, _) = index.get_task(0).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let non_existing_task_id1 = u32::MAX as u64;
+    let non_existing_task_id2 = non_existing_task_id1 - 1;
+    let non_existing_task_id3 = non_existing_task_id1 - 2;
+    let (task, _status_code) = index
+        .delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3])
+        .await;
+    server.wait_task(task.uid()).await.failed();
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -350,7 +362,7 @@ async fn test_summarized_delete_documents_by_batch() {
         "originalFilter": null
       },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -364,15 +376,14 @@ async fn test_summarized_delete_documents_by_batch() {
 
     index.create(None).await;
     let (del_task, _status_code) = index.delete_batch(vec![42]).await;
-    index.wait_task(del_task.uid()).await.succeeded();
+    server.wait_task(del_task.uid()).await.succeeded();
     let (task, _) = index.get_task(del_task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -392,20 +403,19 @@ async fn test_summarized_delete_documents_by_batch() {
 
 #[actix_web::test]
 async fn test_summarized_delete_documents_by_filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -415,7 +425,7 @@ async fn test_summarized_delete_documents_by_filter() {
         "originalFilter": "\"doggo = bernese\""
       },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -430,15 +440,14 @@ async fn test_summarized_delete_documents_by_filter() {
     index.create(None).await;
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -448,7 +457,7 @@ async fn test_summarized_delete_documents_by_filter() {
         "originalFilter": "\"doggo = bernese\""
       },
       "error": {
-        "message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
+        "message": "Index `[uuid]`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
         "code": "invalid_document_filter",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_document_filter"
@@ -463,15 +472,14 @@ async fn test_summarized_delete_documents_by_filter() {
     index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 4,
-      "batchUid": 4,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -491,18 +499,17 @@ async fn test_summarized_delete_documents_by_filter() {
 
 #[actix_web::test]
 async fn test_summarized_delete_document_by_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.delete_document(1).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -512,7 +519,7 @@ async fn test_summarized_delete_document_by_id() {
         "originalFilter": null
       },
       "error": {
-        "message": "Index `test` not found.",
+        "message": "Index `[uuid]` not found.",
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -526,15 +533,14 @@ async fn test_summarized_delete_document_by_id() {
 
     index.create(None).await;
     let (task, _status_code) = index.delete_document(42).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -554,12 +560,12 @@ async fn test_summarized_delete_document_by_id() {
 
 #[actix_web::test]
 async fn test_summarized_settings_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // here we should find my payload even in the failed task.
     let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
-    meili_snap::snapshot!(code, @"400 Bad Request");
-    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
     {
       "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
       "code": "invalid_settings_ranking_rules",
@@ -569,15 +575,14 @@ async fn test_summarized_settings_update() {
     "###);
 
     let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "settingsUpdate",
       "canceledBy": null,
@@ -605,18 +610,17 @@ async fn test_summarized_settings_update() {
 
 #[actix_web::test]
 async fn test_summarized_index_creation() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexCreation",
       "canceledBy": null,
@@ -632,15 +636,14 @@ async fn test_summarized_index_creation() {
     "###);
 
     let (task, _status_code) = index.create(Some("doggos")).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
    let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
         @r###"
     {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexCreation",
       "canceledBy": null,
@@ -648,7 +651,7 @@ async fn test_summarized_index_creation() {
         "primaryKey": "doggos"
       },
       "error": {
-        "message": "Index `test` already exists.",
+        "message": "Index `[uuid]` already exists.",
         "code": "index_already_exists",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_already_exists"
@@ -663,16 +666,16 @@ async fn test_summarized_index_creation() {
|
||||
|
||||
#[actix_web::test]
|
||||
async fn test_summarized_index_deletion() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
let (ret, _code) = index.delete().await;
|
||||
let task = index.wait_task(ret.uid()).await;
|
||||
let task = server.wait_task(ret.uid()).await;
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "test",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "failed",
|
||||
"type": "indexDeletion",
|
||||
"canceledBy": null,
|
||||
@@ -680,7 +683,7 @@ async fn test_summarized_index_deletion() {
|
||||
"deletedDocuments": 0
|
||||
},
|
||||
"error": {
|
||||
"message": "Index `test` not found.",
|
||||
"message": "Index `[uuid]` not found.",
|
||||
"code": "index_not_found",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#index_not_found"
|
||||
@@ -697,13 +700,13 @@ async fn test_summarized_index_deletion() {
|
||||
// both tasks may get autobatched and the deleted documents count will be wrong.
|
||||
let (ret, _code) =
|
||||
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
|
||||
let task = index.wait_task(ret.uid()).await;
|
||||
let task = server.wait_task(ret.uid()).await;
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "test",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "succeeded",
|
||||
"type": "documentAdditionOrUpdate",
|
||||
"canceledBy": null,
|
||||
@@ -720,13 +723,13 @@ async fn test_summarized_index_deletion() {
|
||||
"###);
|
||||
|
||||
let (ret, _code) = index.delete().await;
|
||||
let task = index.wait_task(ret.uid()).await;
|
||||
let task = server.wait_task(ret.uid()).await;
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "test",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "succeeded",
|
||||
"type": "indexDeletion",
|
||||
"canceledBy": null,
|
||||
@@ -743,13 +746,13 @@ async fn test_summarized_index_deletion() {
|
||||
|
||||
// What happens when you delete an index that doesn't exists.
|
||||
let (ret, _code) = index.delete().await;
|
||||
let task = index.wait_task(ret.uid()).await;
|
||||
let task = server.wait_task(ret.uid()).await;
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "test",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "failed",
|
||||
"type": "indexDeletion",
|
||||
"canceledBy": null,
|
||||
@@ -757,7 +760,7 @@ async fn test_summarized_index_deletion() {
|
||||
"deletedDocuments": 0
|
||||
},
|
||||
"error": {
|
||||
"message": "Index `test` not found.",
|
||||
"message": "Index `[uuid]` not found.",
|
||||
"code": "index_not_found",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#index_not_found"
|
||||
@@ -772,19 +775,18 @@ async fn test_summarized_index_deletion() {

#[actix_web::test]
async fn test_summarized_index_update() {
- let server = Server::new().await;
- let index = server.index("test");
+ let server = Server::new_shared();
+ let index = server.unique_index();
// If the index doesn't exist yet, we should get errors with or without the primary key.
let (task, _status_code) = index.update(None).await;
- index.wait_task(task.uid()).await.failed();
+ server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 0,
- "batchUid": 0,
- "indexUid": "test",
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
+ "indexUid": "[uuid]",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
@@ -792,7 +794,7 @@ async fn test_summarized_index_update() {
"primaryKey": null
},
"error": {
- "message": "Index `test` not found.",
+ "message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -805,15 +807,14 @@ async fn test_summarized_index_update() {
"###);

let (task, _status_code) = index.update(Some("bones")).await;
- index.wait_task(task.uid()).await.failed();
+ server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 1,
- "batchUid": 1,
- "indexUid": "test",
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
+ "indexUid": "[uuid]",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
@@ -821,7 +822,7 @@ async fn test_summarized_index_update() {
"primaryKey": "bones"
},
"error": {
- "message": "Index `test` not found.",
+ "message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -837,15 +838,14 @@ async fn test_summarized_index_update() {
index.create(None).await;

let (task, _status_code) = index.update(None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 3,
- "batchUid": 3,
- "indexUid": "test",
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
+ "indexUid": "[uuid]",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
@@ -861,15 +861,14 @@ async fn test_summarized_index_update() {
"###);

let (task, _status_code) = index.update(Some("bones")).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 4,
- "batchUid": 4,
- "indexUid": "test",
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
+ "indexUid": "[uuid]",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
@@ -887,7 +886,7 @@ async fn test_summarized_index_update() {

#[actix_web::test]
async fn test_summarized_index_swap() {
- let server = Server::new().await;
+ let server = Server::new_shared();
let (task, _status_code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
@@ -895,12 +894,11 @@ async fn test_summarized_index_swap() {
.await;
server.wait_task(task.uid()).await.failed();
let (task, _) = server.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 0,
- "batchUid": 0,
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
"indexUid": null,
"status": "failed",
"type": "indexSwap",
@@ -928,23 +926,25 @@ async fn test_summarized_index_swap() {
}
"###);

- let (task, _code) = server.index("doggos").create(None).await;
+ let doggos_index = server.unique_index();
+ let (task, _code) = doggos_index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
- let (task, _code) = server.index("cattos").create(None).await;
+ let cattos_index = server.unique_index();
+ let (task, _code) = cattos_index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server
.index_swap(json!([
- { "indexes": ["doggos", "cattos"] }
+ { "indexes": [doggos_index.uid, cattos_index.uid] }
]))
.await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = server.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(json_string!(task,
+ { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
- "uid": 3,
- "batchUid": 3,
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
@@ -970,20 +970,21 @@ async fn test_summarized_index_swap() {

#[actix_web::test]
async fn test_summarized_task_cancelation() {
- let server = Server::new().await;
- let index = server.index("doggos");
+ let server = Server::new_shared();
+ let index = server.unique_index();
// to avoid being flaky we're only going to cancel an already finished task :(
let (task, _status_code) = index.create(None).await;
- index.wait_task(task.uid()).await.succeeded();
- let (task, _status_code) = server.cancel_tasks("uids=0").await;
- index.wait_task(task.uid()).await.succeeded();
+ let task_uid = task.uid();
+ server.wait_task(task.uid()).await.succeeded();
+ let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await;
+ server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(json_string!(task,
+ { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
- "uid": 1,
- "batchUid": 1,
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "taskCancelation",
@@ -991,7 +992,7 @@ async fn test_summarized_task_cancelation() {
"details": {
"matchedTasks": 1,
"canceledTasks": 0,
- "originalFilter": "?uids=0"
+ "originalFilter": "[of]"
},
"error": null,
"duration": "[duration]",
@@ -1004,20 +1005,19 @@ async fn test_summarized_task_cancelation() {

#[actix_web::test]
async fn test_summarized_task_deletion() {
- let server = Server::new().await;
- let index = server.index("doggos");
+ let server = Server::new_shared();
+ let index = server.unique_index();
// to avoid being flaky we're only going to delete an already finished task :(
let (task, _status_code) = index.create(None).await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.delete_tasks("uids=0").await;
- index.wait_task(task.uid()).await.succeeded();
+ server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 1,
- "batchUid": 1,
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "taskDeletion",
@@ -1038,22 +1038,22 @@ async fn test_summarized_task_deletion() {

#[actix_web::test]
async fn test_summarized_dump_creation() {
+ // Do not use a shared server because it takes too long to create a dump
let server = Server::new().await;
let (task, _status_code) = server.create_dump().await;
server.wait_task(task.uid()).await;
let (task, _) = server.get_task(task.uid()).await;
- assert_json_snapshot!(task,
- { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+ snapshot!(task,
@r###"
{
- "uid": 0,
- "batchUid": 0,
+ "uid": "[uid]",
+ "batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "dumpCreation",
"canceledBy": null,
"details": {
- "dumpUid": "[dumpUid]"
+ "dumpUid": "[dump_uid]"
},
"error": null,
"duration": "[duration]",

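The hunks above all apply one refactor: each test used to boot its own `Server::new()` with a fixed-name index, and now reuses a single shared server with a uniquely named index. A minimal sketch of the resulting pattern, assuming the helpers this diff introduces (`Server::new_shared`, `unique_index`, and awaiting tasks via `server.wait_task`):

// Sketch only; these helpers come from this diff's test utilities,
// not a stable public API.
#[actix_web::test]
async fn example_with_shared_server() {
    // One server process is reused across the whole test binary.
    let server = Server::new_shared();
    // Each test gets an index with a generated uuid, so tests can no
    // longer collide on fixed names like "test" or "doggos".
    let index = server.unique_index();

    let (task, _status) = index.create(None).await;
    // Tasks are awaited through the server: with a shared task queue,
    // uids are global to the test binary rather than starting at 0
    // per test, which is also why filters like "uids=0" become
    // format!("uids={task_uid}") elsewhere in this diff.
    server.wait_task(task.uid()).await.succeeded();
}
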
@@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
- snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.0");
+ snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.2");
}

#[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
- snapshot!(err, @"Database version 1.15.1 is higher than the Meilisearch version 1.15.0. Downgrade is not supported");
+ snapshot!(err, @"Database version 1.15.3 is higher than the Meilisearch version 1.15.2. Downgrade is not supported");
}

#[actix_rt::test]

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
- "upgradeTo": "v1.15.0"
+ "upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

@@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents;

#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
- let server = Server::new().await;
- let index = server.index("doggo");
+ let server = Server::new_shared();
+ let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() {

#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
- let server = Server::new().await;
- let index = server.index("doggo");
+ let server = Server::new_shared();
+ let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() {

#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
- let server = Server::new().await;
- let index = server.index("doggo");
+ let server = Server::new_shared();
+ let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() {

#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
- let server = Server::new().await;
- let index = server.index("doggo");
+ let server = Server::new_shared();
+ let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() {
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
- snapshot!(ret, @r#"
+ snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
- "indexUid": "doggo",
+ "indexUid": "[uuid]",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
@@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() {
}
},
"error": {
- "message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
+ "message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"

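Because uids, dates, and index uuids now vary between runs, the snapshots above redact them before comparison. A minimal sketch of that redaction step, assuming the `snapshot!` and `json_string!` test macros used throughout this diff with insta-style path selectors:

use serde_json::json;

// Hypothetical task payload; only the selector mechanics matter here.
let task = json!({ "uid": 42, "status": "succeeded" });
// Rewrite the run-dependent field to a stable placeholder, then compare
// the pretty-printed result against the inline snapshot.
snapshot!(json_string!(task, { ".uid" => "[uid]" }), @r###"
{
  "uid": "[uid]",
  "status": "succeeded"
}
"###);
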
@@ -9,15 +9,15 @@ edition.workspace = true
license.workspace = true

[dependencies]
- anyhow = "1.0.95"
- clap = { version = "4.5.24", features = ["derive"] }
+ anyhow = "1.0.98"
+ clap = { version = "4.5.40", features = ["derive"] }
dump = { path = "../dump" }
file-store = { path = "../file-store" }
- indexmap = { version = "2.7.0", features = ["serde"] }
+ indexmap = { version = "2.9.0", features = ["serde"] }
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
- serde = { version = "1.0.217", features = ["derive"] }
- serde_json = { version = "1.0.135", features = ["preserve_order"] }
- tempfile = "3.15.0"
- time = { version = "0.3.37", features = ["formatting", "parsing", "alloc"] }
- uuid = { version = "1.11.0", features = ["v4"], default-features = false }
+ serde = { version = "1.0.219", features = ["derive"] }
+ serde_json = { version = "1.0.140", features = ["preserve_order"] }
+ tempfile = "3.20.0"
+ time = { version = "0.3.41", features = ["formatting", "parsing", "alloc"] }
+ uuid = { version = "1.17.0", features = ["v4"], default-features = false }

@@ -162,8 +162,8 @@ fn rebuild_field_distribution(db_path: &Path) -> anyhow::Result<()> {
let (uid, uuid) = result?;
progress.update_progress(VariableNameStep::new(
&uid,
- index_index as u32,
- index_count as u32,
+ index_index as u64,
+ index_count as u64,
));
let index_path = db_path.join("indexes").join(uuid.to_string());

@@ -220,12 +220,12 @@ fn rebuild_field_distribution(db_path: &Path) -> anyhow::Result<()> {

pub struct VariableNameStep {
name: String,
- current: u32,
- total: u32,
+ current: u64,
+ total: u64,
}

impl VariableNameStep {
- pub fn new(name: impl Into<String>, current: u32, total: u32) -> Self {
+ pub fn new(name: impl Into<String>, current: u64, total: u64) -> Self {
Self { name: name.into(), current, total }
}
}
@@ -235,11 +235,11 @@ impl Step for VariableNameStep {
self.name.clone().into()
}

- fn current(&self) -> u32 {
+ fn current(&self) -> u64 {
self.current
}

- fn total(&self) -> u32 {
+ fn total(&self) -> u64 {
self.total
}
}

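The widening above changes the `Step` contract: `current()` and `total()` now report `u64`, so call sites cast their loop counters with `as u64` instead of `as u32`. A small usage sketch under that assumption (the step name and counts here are made up):

// Hypothetical call site mirroring rebuild_field_distribution's loop.
let step = VariableNameStep::new("products-index", 3u64, 128u64);
assert_eq!(step.current(), 3);
assert_eq!(step.total(), 128);
// `progress` stands in for the Progress handle used in the hunk above.
progress.update_progress(step);
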
@@ -15,15 +15,15 @@ license.workspace = true
big_s = "1.0.2"
bimap = { version = "0.6.3", features = ["serde"] }
bincode = "1.3.3"
- bstr = "1.11.3"
- bytemuck = { version = "1.21.0", features = ["extern_crate_alloc"] }
+ bstr = "1.12.0"
+ bytemuck = { version = "1.23.1", features = ["extern_crate_alloc"] }
byteorder = "1.5.0"
charabia = { version = "0.9.6", default-features = false }
concat-arrays = "0.1.2"
- convert_case = "0.6.0"
+ convert_case = "0.8.0"
crossbeam-channel = "0.5.15"
deserr = "0.6.3"
- either = { version = "1.13.0", features = ["serde"] }
+ either = { version = "1.15.0", features = ["serde"] }
flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7"
fxhash = "0.2.1"
@@ -36,32 +36,32 @@ heed = { version = "0.22.0", default-features = false, features = [
"serde-json",
"serde-bincode",
] }
- indexmap = { version = "2.7.0", features = ["serde"] }
+ indexmap = { version = "2.9.0", features = ["serde"] }
json-depth-checker = { path = "../json-depth-checker" }
levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
- memchr = "2.7.4"
+ memchr = "2.7.5"
memmap2 = "0.9.5"
obkv = "0.3.0"
- once_cell = "1.20.2"
- ordered-float = "4.6.0"
+ once_cell = "1.21.3"
+ ordered-float = "5.0.0"
rayon = "1.10.0"
- roaring = { version = "0.10.10", features = ["serde"] }
+ roaring = { version = "0.10.12", features = ["serde"] }
rstar = { version = "0.12.2", features = ["serde"] }
- serde = { version = "1.0.217", features = ["derive"] }
- serde_json = { version = "1.0.135", features = ["preserve_order", "raw_value"] }
+ serde = { version = "1.0.219", features = ["derive"] }
+ serde_json = { version = "1.0.140", features = ["preserve_order", "raw_value"] }
slice-group-by = "0.3.1"
smallstr = { version = "0.3.0", features = ["serde"] }
- smallvec = "1.13.2"
+ smallvec = "1.15.1"
smartstring = "1.0.1"
- tempfile = "3.15.0"
- thiserror = "2.0.9"
- time = { version = "0.3.37", features = [
+ tempfile = "3.20.0"
+ thiserror = "2.0.12"
+ time = { version = "0.3.41", features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
- uuid = { version = "1.11.0", features = ["v4"] }
+ uuid = { version = "1.17.0", features = ["v4"] }

filter-parser = { path = "../filter-parser" }

@@ -69,51 +69,50 @@ filter-parser = { path = "../filter-parser" }
itertools = "0.14.0"

csv = "1.3.1"
- candle-core = { version = "0.8.2" }
- candle-transformers = { version = "0.8.2" }
- candle-nn = { version = "0.8.2" }
+ candle-core = { version = "0.9.1" }
+ candle-transformers = { version = "0.9.1" }
+ candle-nn = { version = "0.9.1" }
tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false, features = [
"onig",
] }
hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false, features = [
"online",
] }
- tiktoken-rs = "0.6.0"
- liquid = "0.26.9"
- rhai = { git = "https://github.com/rhaiscript/rhai", rev = "ef3df63121d27aacd838f366f2b83fd65f20a1e4", features = [
+ tiktoken-rs = "0.7.0"
+ liquid = "0.26.11"
+ rhai = { version = "1.22.2", features = [
"serde",
"no_module",
"no_custom_syntax",
"no_time",
"sync",
] }
- arroy = "0.6.1"
+ # arroy = "0.6.1"
+ arroy = { git = "https://github.com/meilisearch/arroy.git", rev = "a63f0979b216dde10d50fdfa4fadcb2b1dea73c7" } # incremental update
rand = "0.8.5"
tracing = "0.1.41"
ureq = { version = "2.12.1", features = ["json"] }
url = "2.5.4"
rayon-par-bridge = "0.1.0"
- hashbrown = "0.15.2"
- bumpalo = "3.16.0"
+ hashbrown = "0.15.4"
+ bumpalo = "3.18.1"
bumparaw-collections = "0.1.4"
- thread_local = "1.1.8"
- allocator-api2 = "0.2.21"
- rustc-hash = "2.1.0"
- uell = "0.1.0"
+ thread_local = "1.1.9"
+ allocator-api2 = "0.3.0"
+ rustc-hash = "2.1.1"
enum-iterator = "2.1.0"
bbqueue = { git = "https://github.com/meilisearch/bbqueue" }
flume = { version = "0.11.1", default-features = false }
- utoipa = { version = "5.3.1", features = [
+ utoipa = { version = "5.4.0", features = [
"non_strict_integers",
"preserve_order",
"uuid",
"time",
"openapi_extensions",
] }
- lru = "0.13.0"
+ lru = "0.14.0"

[dev-dependencies]
- mimalloc = { version = "0.1.43", default-features = false }
+ mimalloc = { version = "0.1.47", default-features = false }
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
maplit = "1.0.2"

Some files were not shown because too many files have changed in this diff.