Compare commits

6 Commits

Author       SHA1        Message                                Date
ManyTheFish  d57026cd96  Support synonyms sinergies             2023-07-25 15:01:42 +02:00
ManyTheFish  41c9e8856a  Fix test                               2023-07-25 10:55:37 +02:00
ManyTheFish  d4ff59fcf5  Fix clippy                             2023-07-24 18:42:26 +02:00
ManyTheFish  9c485f8563  Make the search and the indexing work  2023-07-24 18:35:20 +02:00
ManyTheFish  d8d12d5979  Be able to set and reset settings      2023-07-24 17:00:18 +02:00
ManyTheFish  0597a97c84  Update tests                           2023-07-20 11:15:10 +02:00
176 changed files with 2691 additions and 2385 deletions

View File

@@ -35,7 +35,7 @@ jobs:
- name: Build deb package
run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
- name: Upload debian pkg to release
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.6.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/debian/meilisearch.deb

View File

@@ -54,7 +54,7 @@ jobs:
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.6.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.6.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
- name: Upload the binary to release
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.6.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
- name: Upload the binary to release
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.6.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/${{ matrix.target }}/release/meilisearch

View File

@@ -37,13 +37,13 @@ jobs:
toolchain: stable
override: true
- name: Setup test with Rust nightly
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
if: github.event_name == 'schedule'
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.5.1
uses: Swatinem/rust-cache@v2.5.0
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:
@@ -65,7 +65,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.5.1
uses: Swatinem/rust-cache@v2.5.0
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:
@@ -78,12 +78,12 @@ jobs:
args: --locked --release --all
test-all-features:
name: Tests all features
name: Tests all features on cron schedule only
runs-on: ubuntu-latest
container:
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
image: ubuntu:18.04
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
if: github.event_name == 'schedule'
steps:
- uses: actions/checkout@v3
- name: Install needed dependencies
@@ -110,7 +110,7 @@ jobs:
runs-on: ubuntu-latest
container:
image: ubuntu:18.04
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
if: github.event_name == 'schedule'
steps:
- uses: actions/checkout@v3
- name: Install needed dependencies
@@ -146,7 +146,7 @@ jobs:
toolchain: stable
override: true
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.5.1
uses: Swatinem/rust-cache@v2.5.0
- name: Run tests in debug
uses: actions-rs/cargo@v1
with:
@@ -165,7 +165,7 @@ jobs:
override: true
components: clippy
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.5.1
uses: Swatinem/rust-cache@v2.5.0
- name: Run cargo clippy
uses: actions-rs/cargo@v1
with:
@@ -184,7 +184,7 @@ jobs:
override: true
components: rustfmt
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.5.1
uses: Swatinem/rust-cache@v2.5.0
- name: Run cargo fmt
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate

Cargo.lock (generated, 359 changes)
View File

@@ -8,7 +8,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "617a8268e3537fe1d8c9ead925fca49ef6400927ee7bc26750e90ecee14ce4b8"
dependencies = [
"bitflags 1.3.2",
"bitflags",
"bytes",
"futures-core",
"futures-sink",
@@ -47,7 +47,7 @@ dependencies = [
"actix-utils",
"ahash 0.8.3",
"base64 0.21.2",
"bitflags 1.3.2",
"bitflags",
"brotli",
"bytes",
"bytestring",
@@ -405,7 +405,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -416,7 +416,7 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -428,6 +428,17 @@ dependencies = [
"critical-section",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi 0.1.19",
"libc",
"winapi",
]
[[package]]
name = "autocfg"
version = "1.1.0"
@@ -516,12 +527,6 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42"
[[package]]
name = "block-buffer"
version = "0.10.4"
@@ -603,7 +608,7 @@ checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -700,15 +705,16 @@ dependencies = [
[[package]]
name = "charabia"
version = "0.8.2"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57aa1b4a8dda126c03ebf2f7e31d16cfc8781c2fe80dedd1a33459efc3e07578"
checksum = "bb49850f555eb71aa6fc6d4d79420e81f4d89fa56e0e9c0f6d19aace2f56c554"
dependencies = [
"aho-corasick",
"cow-utils",
"csv",
"deunicode",
"either",
"finl_unicode",
"fst",
"irg-kvariants",
"jieba-rs",
@@ -761,6 +767,18 @@ dependencies = [
"inout",
]
[[package]]
name = "clap"
version = "3.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
dependencies = [
"bitflags",
"clap_lex 0.2.4",
"indexmap",
"textwrap",
]
[[package]]
name = "clap"
version = "4.3.0"
@@ -780,8 +798,8 @@ checksum = "4f423e341edefb78c9caba2d9c7f7687d0e72e89df3ce3394554754393ac3990"
dependencies = [
"anstream",
"anstyle",
"bitflags 1.3.2",
"clap_lex",
"bitflags",
"clap_lex 0.5.0",
"strsim",
]
@@ -794,7 +812,16 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
name = "clap_lex"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
dependencies = [
"os_str_bytes",
]
[[package]]
@@ -902,19 +929,19 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.5.1"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb"
dependencies = [
"anes",
"atty",
"cast",
"ciborium",
"clap",
"clap 3.2.25",
"criterion-plot",
"is-terminal",
"itertools",
"lazy_static",
"num-traits",
"once_cell",
"oorandom",
"plotters",
"rayon",
@@ -1021,9 +1048,9 @@ dependencies = [
[[package]]
name = "csv"
version = "1.2.2"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086"
checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad"
dependencies = [
"csv-core",
"itoa",
@@ -1197,6 +1224,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "dump"
version = "1.3.0"
@@ -1336,7 +1369,7 @@ checksum = "eecf8589574ce9b895052fa12d69af7a233f99e6107f5cb8dd1044f2a17bfdcb"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -1352,12 +1385,6 @@ dependencies = [
"termcolor",
]
[[package]]
name = "equivalent"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
[[package]]
name = "errno"
version = "0.3.1"
@@ -1442,6 +1469,12 @@ dependencies = [
"nom_locate",
]
[[package]]
name = "finl_unicode"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6"
[[package]]
name = "flate2"
version = "1.0.26"
@@ -1537,7 +1570,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -1575,7 +1608,7 @@ name = "fuzzers"
version = "1.3.0"
dependencies = [
"arbitrary",
"clap",
"clap 4.3.0",
"fastrand",
"milli",
"serde",
@@ -1643,7 +1676,7 @@ version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf7f68c2995f392c49fffb4f95ae2c873297830eb25c6bc4c114ce8f4562acc"
dependencies = [
"bitflags 1.3.2",
"bitflags",
"libc",
"libgit2-sys",
"log",
@@ -1679,7 +1712,7 @@ dependencies = [
"futures-sink",
"futures-util",
"http",
"indexmap 1.9.3",
"indexmap",
"slab",
"tokio",
"tokio-util",
@@ -1703,18 +1736,21 @@ dependencies = [
[[package]]
name = "hashbrown"
version = "0.12.3"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
"ahash 0.7.6",
]
[[package]]
name = "hashbrown"
version = "0.14.0"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
dependencies = [
"ahash 0.7.6",
]
[[package]]
name = "heapless"
@@ -1737,8 +1773,8 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
[[package]]
name = "heed"
version = "0.12.7"
source = "git+https://github.com/meilisearch/heed?tag=v0.12.7#061a5276b1f336f5f3302bee291e336041d88632"
version = "0.12.5"
source = "git+https://github.com/meilisearch/heed?tag=v0.12.6#8c5b94225fc949c02bb7b900cc50ffaf6b584b1e"
dependencies = [
"byteorder",
"heed-traits",
@@ -1755,12 +1791,12 @@ dependencies = [
[[package]]
name = "heed-traits"
version = "0.7.0"
source = "git+https://github.com/meilisearch/heed?tag=v0.12.7#061a5276b1f336f5f3302bee291e336041d88632"
source = "git+https://github.com/meilisearch/heed?tag=v0.12.6#8c5b94225fc949c02bb7b900cc50ffaf6b584b1e"
[[package]]
name = "heed-types"
version = "0.7.2"
source = "git+https://github.com/meilisearch/heed?tag=v0.12.7#061a5276b1f336f5f3302bee291e336041d88632"
source = "git+https://github.com/meilisearch/heed?tag=v0.12.6#8c5b94225fc949c02bb7b900cc50ffaf6b584b1e"
dependencies = [
"bincode",
"heed-traits",
@@ -1769,6 +1805,15 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.2.6"
@@ -1799,6 +1844,22 @@ dependencies = [
"digest",
]
[[package]]
name = "hnsw"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b9740ebf8769ec4ad6762cc951ba18f39bba6dfbc2fbbe46285f7539af79752"
dependencies = [
"ahash 0.7.6",
"hashbrown 0.11.2",
"libm",
"num-traits",
"rand_core",
"serde",
"smallvec",
"space",
]
[[package]]
name = "http"
version = "0.2.9"
@@ -1934,16 +1995,6 @@ dependencies = [
"serde",
]
[[package]]
name = "indexmap"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
dependencies = [
"equivalent",
"hashbrown 0.14.0",
]
[[package]]
name = "inout"
version = "0.1.3"
@@ -1978,21 +2029,6 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "instant-distance"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c619cdaa30bb84088963968bee12a45ea5fbbf355f2c021bcd15589f5ca494a"
dependencies = [
"num_cpus",
"ordered-float",
"parking_lot",
"rand",
"rayon",
"serde",
"serde-big-array",
]
[[package]]
name = "io-lifetimes"
version = "1.0.11"
@@ -2023,12 +2059,13 @@ dependencies = [
[[package]]
name = "is-terminal"
version = "0.4.9"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f"
dependencies = [
"hermit-abi 0.3.1",
"rustix 0.38.4",
"io-lifetimes",
"rustix 0.37.19",
"windows-sys 0.48.0",
]
@@ -2125,9 +2162,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.147"
version = "0.2.144"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1"
[[package]]
name = "libgit2-sys"
@@ -2171,9 +2208,9 @@ dependencies = [
[[package]]
name = "lindera-cc-cedict-builder"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d2e8f2ca97ddf952fe340642511b9c14b373cb2eef711d526bb8ef2ca0969b8"
checksum = "4c6bf79b29a90bcd22036e494d6cc9ac3abe9ab604b21f3258ba6dc1ce501801"
dependencies = [
"anyhow",
"bincode",
@@ -2190,9 +2227,9 @@ dependencies = [
[[package]]
name = "lindera-compress"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f72b460559bcbe8a9cee85ea4a5056133ed3abf373031191589236e656d65b59"
checksum = "8f2e99e67736352bbb6ed1c273643975822505067ca32194b0981040bc50527a"
dependencies = [
"anyhow",
"flate2",
@@ -2201,9 +2238,9 @@ dependencies = [
[[package]]
name = "lindera-core"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f586eb8a9393c32d5525e0e9336a3727bd1329674740097126f3b0bff8a1a1ea"
checksum = "7c3935e966409156f22cb4b334b21b0dce84b7aa1cad62214b466489d249c8e5"
dependencies = [
"anyhow",
"bincode",
@@ -2218,9 +2255,9 @@ dependencies = [
[[package]]
name = "lindera-decompress"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fb1facd8da698072fcc7338bd757730db53d59f313f44dd583fa03681dcc0e1"
checksum = "7476406abb63c49d7f59c88b9b868ee8d2981495ea7e2c3ad129902f9916b3c6"
dependencies = [
"anyhow",
"flate2",
@@ -2229,9 +2266,9 @@ dependencies = [
[[package]]
name = "lindera-dictionary"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec7be7410b1da7017a8948986b87af67082f605e9a716f0989790d795d677f0c"
checksum = "808b7d2b3cabc25a4022526d484a4cfd1d5924dc76a26e0379707698841acef2"
dependencies = [
"anyhow",
"bincode",
@@ -2249,9 +2286,9 @@ dependencies = [
[[package]]
name = "lindera-ipadic-builder"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "705d07f8a45d04fd95149f7ad41a26d1f9e56c9c00402be6f9dd05e3d88b99c6"
checksum = "31f373a280958c930e5ee4a1e4db3a0ee0542afaf02d3b5cacb8cab4e298648e"
dependencies = [
"anyhow",
"bincode",
@@ -2270,9 +2307,9 @@ dependencies = [
[[package]]
name = "lindera-ipadic-neologd-builder"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "633a93983ba13fba42328311a501091bd4a7aff0c94ae9eaa9d4733dd2b0468a"
checksum = "92eff98e9ed1a7a412b91709c2343457a04ef02fa0c27c27e3a5892f5591eae9"
dependencies = [
"anyhow",
"bincode",
@@ -2291,9 +2328,9 @@ dependencies = [
[[package]]
name = "lindera-ko-dic"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a428e0d316b6c86f51bd919479692bc41ad840dba266ebc044663970f431ea18"
checksum = "74c6d5bf7d8092bd6d10de7a5d74b70ea7cf234586235b0d6cdb903b05a6c9e2"
dependencies = [
"bincode",
"byteorder",
@@ -2308,9 +2345,9 @@ dependencies = [
[[package]]
name = "lindera-ko-dic-builder"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a5288704c6b8a069c0a1705c38758e836497698b50453373ab3d56c6f9a7ef8"
checksum = "f0a4add6d3c1e41ec9e2690d33e287d0223fb59a30ccee4980c23f31368cae1e"
dependencies = [
"anyhow",
"bincode",
@@ -2328,9 +2365,9 @@ dependencies = [
[[package]]
name = "lindera-tokenizer"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "106ba439b2e87529d9bbedbb88d69f635baba1195c26502b308f55a85885fc81"
checksum = "cb6a8acbd068019d1cdac7316f0dcb87f8e33ede2b13aa237f45114f9750afb8"
dependencies = [
"bincode",
"byteorder",
@@ -2343,9 +2380,9 @@ dependencies = [
[[package]]
name = "lindera-unidic"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3399b6dcfe1701333451d184ff3c677f433b320153427b146360c9e4bd8cb816"
checksum = "14abf0613d350b30d3b0406a33b1de8fa8d829f26516909421702174785991c8"
dependencies = [
"bincode",
"byteorder",
@@ -2360,9 +2397,9 @@ dependencies = [
[[package]]
name = "lindera-unidic-builder"
version = "0.27.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b698227fdaeac32289173ab389b990d4eb00a40cbc9912020f69a0c491dabf55"
checksum = "e204ed53d9bd63227d1e6a6c1f122ca039e00a8634ac32e7fb0281eeec8615c4"
dependencies = [
"anyhow",
"bincode",
@@ -2396,12 +2433,6 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
name = "linux-raw-sys"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
[[package]]
name = "lmdb-rkv-sys"
version = "0.15.1"
@@ -2442,9 +2473,9 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.19"
version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
[[package]]
name = "logging_timer"
@@ -2483,7 +2514,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -2521,12 +2552,13 @@ dependencies = [
"assert-json-diff",
"async-stream",
"async-trait",
"atty",
"brotli",
"bstr",
"byte-unit",
"bytes",
"cargo_toml",
"clap",
"clap 4.3.0",
"crossbeam-channel",
"deserr",
"dump",
@@ -2540,9 +2572,8 @@ dependencies = [
"hex",
"http",
"index-scheduler",
"indexmap 1.9.3",
"indexmap",
"insta",
"is-terminal",
"itertools",
"jsonwebtoken",
"lazy_static",
@@ -2694,9 +2725,9 @@ dependencies = [
"geoutils",
"grenad",
"heed",
"indexmap 1.9.3",
"hnsw",
"indexmap",
"insta",
"instant-distance",
"itertools",
"json-depth-checker",
"levenshtein_automata",
@@ -2721,6 +2752,7 @@ dependencies = [
"smallstr",
"smallvec",
"smartstring",
"space",
"tempfile",
"thiserror",
"time",
@@ -2881,9 +2913,9 @@ checksum = "f69e48cd7c8e5bb52a1da1287fdbfd877c32673176583ce664cd63b201aba385"
[[package]]
name = "once_cell"
version = "1.18.0"
version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
[[package]]
name = "oorandom"
@@ -2900,6 +2932,12 @@ dependencies = [
"num-traits",
]
[[package]]
name = "os_str_bytes"
version = "6.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267"
[[package]]
name = "page_size"
version = "0.4.2"
@@ -3040,7 +3078,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -3198,7 +3236,7 @@ version = "0.14.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1de8dacb0873f77e6aefc6d71e044761fcc68060290f5b1089fcdf84626bb69"
dependencies = [
"bitflags 1.3.2",
"bitflags",
"byteorder",
"hex",
"lazy_static",
@@ -3259,9 +3297,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.30"
version = "1.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5907a1b7c277254a8b15170f6e7c97cfa60ee7872a3217663bb81151e48184bb"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
dependencies = [
"proc-macro2",
]
@@ -3334,7 +3372,7 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags 1.3.2",
"bitflags",
]
[[package]]
@@ -3343,7 +3381,7 @@ version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags 1.3.2",
"bitflags",
]
[[package]]
@@ -3485,7 +3523,7 @@ version = "0.36.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14e4d67015953998ad0eb82887a0eb0129e18a7e2f3b7b0f6c422fddcd503d62"
dependencies = [
"bitflags 1.3.2",
"bitflags",
"errno",
"io-lifetimes",
"libc",
@@ -3499,7 +3537,7 @@ version = "0.37.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d"
dependencies = [
"bitflags 1.3.2",
"bitflags",
"errno",
"io-lifetimes",
"libc",
@@ -3507,19 +3545,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "rustix"
version = "0.38.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5"
dependencies = [
"bitflags 2.3.3",
"errno",
"libc",
"linux-raw-sys 0.4.5",
"windows-sys 0.48.0",
]
[[package]]
name = "rustls"
version = "0.20.8"
@@ -3622,22 +3647,13 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed"
[[package]]
name = "serde"
version = "1.0.180"
version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ea67f183f058fe88a4e3ec6e2788e003840893b91bac4559cabedd00863b3ed"
checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde-big-array"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11fc7cc2c76d73e0f27ee52abbd64eec84d46f370c88371120433196934e4b7f"
dependencies = [
"serde",
]
[[package]]
name = "serde-cs"
version = "0.2.4"
@@ -3649,22 +3665,22 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.180"
version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24e744d7782b686ab3b73267ef05697159cc0e5abbed3f47f9933165e5219036"
checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
name = "serde_json"
version = "1.0.104"
version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c"
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [
"indexmap 2.0.0",
"indexmap",
"itoa",
"ryu",
"serde",
@@ -3787,6 +3803,9 @@ name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
dependencies = [
"serde",
]
[[package]]
name = "smartstring"
@@ -3809,6 +3828,16 @@ dependencies = [
"winapi",
]
[[package]]
name = "space"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5ab9701ae895386d13db622abf411989deff7109b13b46b6173bb4ce5c1d123"
dependencies = [
"doc-comment",
"num-traits",
]
[[package]]
name = "spin"
version = "0.5.2"
@@ -3872,9 +3901,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.28"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
dependencies = [
"proc-macro2",
"quote",
@@ -3960,23 +3989,29 @@ dependencies = [
]
[[package]]
name = "thiserror"
version = "1.0.44"
name = "textwrap"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90"
checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
[[package]]
name = "thiserror"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.44"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96"
checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -4058,7 +4093,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
]
[[package]]
@@ -4134,7 +4169,7 @@ version = "0.19.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739"
dependencies = [
"indexmap 1.9.3",
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
@@ -4383,7 +4418,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
"wasm-bindgen-shared",
]
@@ -4417,7 +4452,7 @@ checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.18",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]

View File

@@ -65,7 +65,7 @@ You may also want to check out [Meilisearch 101](https://www.meilisearch.com/doc
## ⚡ Supercharge your Meilisearch experience
Say goodbye to server deployment and manual updates with [Meilisearch Cloud](https://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=github&utm_medium=meilisearch). No credit card required.
Say goodbye to server deployment and manual updates with [Meilisearch Cloud](https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=engine&utm_medium=meilisearch). Get started with a 14-day free trial! No credit card required.
## 🧰 SDKs & integration tools
@@ -87,7 +87,7 @@ Finally, for more in-depth information, refer to our articles explaining fundame
Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) of our documentation.

View File

@@ -14,11 +14,11 @@ license.workspace = true
anyhow = "1.0.70"
csv = "1.2.1"
milli = { path = "../milli" }
mimalloc = { version = "0.1.37", default-features = false }
mimalloc = { version = "0.1.36", default-features = false }
serde_json = { version = "1.0.95", features = ["preserve_order"] }
[dev-dependencies]
criterion = { version = "0.5.1", features = ["html_reports"] }
criterion = { version = "0.4.0", features = ["html_reports"] }
rand = "0.8.5"
rand_chacha = "0.3.1"
roaring = "0.10.1"
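
Both sides of the hunk above keep criterion's html_reports feature and only move the version pin (0.5.1 on one side, 0.4.0 on the other). For orientation, a minimal benchmark that compiles against either criterion release might look like the sketch below; fib is a placeholder workload, not part of this repository.

use criterion::{black_box, criterion_group, criterion_main, Criterion};

// Placeholder workload so the sketch is self-contained.
fn fib(n: u64) -> u64 {
    match n {
        0 | 1 => 1,
        _ => fib(n - 1) + fib(n - 2),
    }
}

fn bench_fib(c: &mut Criterion) {
    // `html_reports` only changes the output criterion writes under
    // target/criterion; the benchmark code itself is identical.
    c.bench_function("fib 20", |b| b.iter(|| fib(black_box(20))));
}

criterion_group!(benches, bench_fib);
criterion_main!(benches);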

View File

@@ -210,7 +210,6 @@ pub(crate) mod test {
use big_s::S;
use maplit::{btreemap, btreeset};
use meilisearch_types::facet_values_sort::FacetValuesSort;
use meilisearch_types::features::RuntimeTogglableFeatures;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::keys::{Action, Key};
use meilisearch_types::milli;
@@ -262,6 +261,9 @@ pub(crate) mod test {
sortable_attributes: Setting::Set(btreeset! { S("age") }),
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
separator_tokens: Setting::NotSet,
dictionary: Setting::NotSet,
synonyms: Setting::NotSet,
distinct_attribute: Setting::NotSet,
typo_tolerance: Setting::NotSet,
@@ -419,10 +421,7 @@ pub(crate) mod test {
}
keys.flush().unwrap();
// ========== experimental features
let features = create_test_features();
dump.create_experimental_features(features).unwrap();
// ========== TODO: create features here
// create the dump
let mut file = tempfile::tempfile().unwrap();
@@ -432,10 +431,6 @@ pub(crate) mod test {
file
}
fn create_test_features() -> RuntimeTogglableFeatures {
RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
}
#[test]
fn test_creating_and_read_dump() {
let mut file = create_test_dump();
@@ -480,9 +475,5 @@ pub(crate) mod test {
for (key, expected) in dump.keys().unwrap().zip(create_test_api_keys()) {
assert_eq!(key.unwrap(), expected);
}
// ==== checking the features
let expected = create_test_features();
assert_eq!(dump.features().unwrap().unwrap(), expected);
}
}

View File

@@ -340,6 +340,9 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
}
},
stop_words: settings.stop_words.into(),
non_separator_tokens: v6::Setting::NotSet,
separator_tokens: v6::Setting::NotSet,
dictionary: v6::Setting::NotSet,
synonyms: settings.synonyms.into(),
distinct_attribute: settings.distinct_attribute.into(),
typo_tolerance: match settings.typo_tolerance {

View File

@@ -195,53 +195,8 @@ pub(crate) mod test {
use meili_snap::insta;
use super::*;
use crate::reader::v6::RuntimeTogglableFeatures;
#[test]
fn import_dump_v6_experimental() {
let dump = File::open("tests/assets/v6-with-experimental.dump").unwrap();
let mut dump = DumpReader::open(dump).unwrap();
// top level infos
insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"d45cd8571703e58ae53c7bd7ce3f5c22");
assert_eq!(update_files.len(), 2);
assert!(update_files[0].is_none()); // the dump creation
assert!(update_files[1].is_none()); // the processed document addition
// keys
let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"13c2da155e9729c2344688cab29af71d");
// indexes
let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
// the index are not ordered in any way by default
indexes.sort_by_key(|index| index.metadata().uid.to_string());
let mut test = indexes.pop().unwrap();
assert!(indexes.is_empty());
insta::assert_json_snapshot!(test.metadata(), @r###"
{
"uid": "test",
"primaryKey": "id",
"createdAt": "2023-07-06T07:07:41.364694Z",
"updatedAt": "2023-07-06T07:07:41.396114Z"
}
"###);
assert_eq!(test.documents().unwrap().count(), 1);
assert_eq!(
dump.features().unwrap().unwrap(),
RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
);
}
// TODO: add `features` to tests
#[test]
fn import_dump_v5() {
@@ -319,8 +274,6 @@ pub(crate) mod test {
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
assert_eq!(documents.len(), 10);
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
assert_eq!(dump.features().unwrap(), None);
}
#[test]

View File

@@ -292,7 +292,6 @@ pub(crate) mod test {
│ ├---- update_files/
│ │ └---- 1.jsonl
│ └---- queue.jsonl
├---- experimental-features.json
├---- instance_uid.uuid
├---- keys.jsonl
└---- metadata.json

View File

@@ -472,77 +472,6 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
terminated(|input| parse_expression(input, 0), eof)(input)
}
impl<'a> std::fmt::Display for FilterCondition<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FilterCondition::Not(filter) => {
write!(f, "NOT ({filter})")
}
FilterCondition::Condition { fid, op } => {
write!(f, "{fid} {op}")
}
FilterCondition::In { fid, els } => {
write!(f, "{fid} IN[")?;
for el in els {
write!(f, "{el}, ")?;
}
write!(f, "]")
}
FilterCondition::Or(els) => {
write!(f, "OR[")?;
for el in els {
write!(f, "{el}, ")?;
}
write!(f, "]")
}
FilterCondition::And(els) => {
write!(f, "AND[")?;
for el in els {
write!(f, "{el}, ")?;
}
write!(f, "]")
}
FilterCondition::GeoLowerThan { point, radius } => {
write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
}
FilterCondition::GeoBoundingBox {
top_right_point: top_left_point,
bottom_left_point: bottom_right_point,
} => {
write!(
f,
"_geoBoundingBox([{}, {}], [{}, {}])",
top_left_point[0],
top_left_point[1],
bottom_right_point[0],
bottom_right_point[1]
)
}
}
}
}
impl<'a> std::fmt::Display for Condition<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Condition::GreaterThan(token) => write!(f, "> {token}"),
Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
Condition::Equal(token) => write!(f, "= {token}"),
Condition::NotEqual(token) => write!(f, "!= {token}"),
Condition::Null => write!(f, "IS NULL"),
Condition::Empty => write!(f, "IS EMPTY"),
Condition::Exists => write!(f, "EXISTS"),
Condition::LowerThan(token) => write!(f, "< {token}"),
Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
Condition::Between { from, to } => write!(f, "{from} TO {to}"),
}
}
}
impl<'a> std::fmt::Display for Token<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{{{}}}", self.value())
}
}
#[cfg(test)]
pub mod tests {
use super::*;
@@ -923,3 +852,74 @@ pub mod tests {
assert_eq!(token.value(), s);
}
}
impl<'a> std::fmt::Display for FilterCondition<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FilterCondition::Not(filter) => {
write!(f, "NOT ({filter})")
}
FilterCondition::Condition { fid, op } => {
write!(f, "{fid} {op}")
}
FilterCondition::In { fid, els } => {
write!(f, "{fid} IN[")?;
for el in els {
write!(f, "{el}, ")?;
}
write!(f, "]")
}
FilterCondition::Or(els) => {
write!(f, "OR[")?;
for el in els {
write!(f, "{el}, ")?;
}
write!(f, "]")
}
FilterCondition::And(els) => {
write!(f, "AND[")?;
for el in els {
write!(f, "{el}, ")?;
}
write!(f, "]")
}
FilterCondition::GeoLowerThan { point, radius } => {
write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
}
FilterCondition::GeoBoundingBox {
top_right_point: top_left_point,
bottom_left_point: bottom_right_point,
} => {
write!(
f,
"_geoBoundingBox([{}, {}], [{}, {}])",
top_left_point[0],
top_left_point[1],
bottom_right_point[0],
bottom_right_point[1]
)
}
}
}
}
impl<'a> std::fmt::Display for Condition<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Condition::GreaterThan(token) => write!(f, "> {token}"),
Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
Condition::Equal(token) => write!(f, "= {token}"),
Condition::NotEqual(token) => write!(f, "!= {token}"),
Condition::Null => write!(f, "IS NULL"),
Condition::Empty => write!(f, "IS EMPTY"),
Condition::Exists => write!(f, "EXISTS"),
Condition::LowerThan(token) => write!(f, "< {token}"),
Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
Condition::Between { from, to } => write!(f, "{from} TO {to}"),
}
}
}
impl<'a> std::fmt::Display for Token<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{{{}}}", self.value())
}
}
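
The hunks above move the Display implementations for FilterCondition, Condition, and Token below the test module without changing their behavior: filters render in a bracketed prefix form (AND[...], OR[...]) and each token is wrapped in braces. A self-contained sketch of the same convention, using simplified illustrative types rather than milli's actual ones:

use std::fmt;

// Simplified stand-in for FilterCondition, for illustration only.
enum Cond {
    Equal(String, String),
    And(Vec<Cond>),
}

impl fmt::Display for Cond {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Each token is wrapped in braces, mirroring Token's Display.
            Cond::Equal(fid, value) => write!(f, "{{{fid}}} = {{{value}}}"),
            Cond::And(els) => {
                write!(f, "AND[")?;
                for el in els {
                    write!(f, "{el}, ")?;
                }
                write!(f, "]")
            }
        }
    }
}

fn main() {
    let filter = Cond::And(vec![
        Cond::Equal("genre".into(), "horror".into()),
        Cond::Equal("year".into(), "2023".into()),
    ]);
    // Prints: AND[{genre} = {horror}, {year} = {2023}, ]
    println!("{filter}");
}

Like the original impls, elements keep a trailing separator inside the brackets.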

View File

@@ -16,7 +16,7 @@ license.workspace = true
serde_json = "1.0"
[dev-dependencies]
criterion = { version = "0.5.1", features = ["html_reports"] }
criterion = { version = "0.4.0", features = ["html_reports"] }
[[bench]]
name = "benchmarks"

View File

@@ -138,12 +138,6 @@ impl Query {
index_vec.push(index_uid);
Self { index_uids: Some(index_vec), ..self }
}
// Removes the `from` and `limit` restrictions from the query.
// Useful to get the total number of tasks matching a filter.
pub fn without_limits(self) -> Self {
Query { limit: None, from: None, ..self }
}
}
#[derive(Debug, Clone)]
@@ -813,11 +807,6 @@ impl IndexScheduler {
Ok(res)
}
// Return true if there is at least one task that is processing.
pub fn is_task_processing(&self) -> Result<bool> {
Ok(!self.processing_tasks.read().unwrap().processing.is_empty())
}
/// Return true iff there is at least one task associated with this index
/// that is processing.
pub fn is_index_processing(&self, index: &str) -> Result<bool> {
@@ -828,8 +817,7 @@ impl IndexScheduler {
Ok(nbr_index_processing_tasks > 0)
}
/// Return the task ids matching the query along with the total number of tasks
/// by ignoring the from and limit parameters from the user's point of view.
/// Return the task ids matching the query from the user's point of view.
///
/// There are two differences between an internal query and a query executed by
/// the user.
@@ -842,13 +830,7 @@ impl IndexScheduler {
rtxn: &RoTxn,
query: &Query,
filters: &meilisearch_auth::AuthFilter,
) -> Result<(RoaringBitmap, u64)> {
// compute all tasks matching the filter by ignoring the limits, to find the number of tasks matching
// the filter.
// As this causes us to compute the filter twice it is slightly inefficient, but doing it this way spares
// us from modifying the underlying implementation, and the performance remains sufficient.
// Should this change, we would modify `get_task_ids` to directly return the number of matching tasks.
let total_tasks = self.get_task_ids(rtxn, &query.clone().without_limits())?;
) -> Result<RoaringBitmap> {
let mut tasks = self.get_task_ids(rtxn, query)?;
// If the query contains a list of index uid or there is a finite list of authorized indexes,
@@ -871,11 +853,10 @@ impl IndexScheduler {
}
}
Ok((tasks, total_tasks.len()))
Ok(tasks)
}
/// Return the tasks matching the query from the user's point of view along
/// with the total number of tasks matching the query, ignoring from and limit.
/// Return the tasks matching the query from the user's point of view.
///
/// There are two differences between an internal query and a query executed by
/// the user.
@@ -887,10 +868,11 @@ impl IndexScheduler {
&self,
query: Query,
filters: &meilisearch_auth::AuthFilter,
) -> Result<(Vec<Task>, u64)> {
) -> Result<Vec<Task>> {
let rtxn = self.env.read_txn()?;
let (tasks, total) = self.get_task_ids_from_authorized_indexes(&rtxn, &query, filters)?;
let tasks = self.get_task_ids_from_authorized_indexes(&rtxn, &query, filters)?;
let tasks = self.get_existing_tasks(
&rtxn,
tasks.into_iter().rev().take(query.limit.unwrap_or(u32::MAX) as usize),
@@ -901,19 +883,16 @@ impl IndexScheduler {
let ret = tasks.into_iter();
if processing.is_empty() {
Ok((ret.collect(), total))
Ok(ret.collect())
} else {
Ok((
ret.map(|task| {
if processing.contains(task.uid) {
Ok(ret
.map(|task| match processing.contains(task.uid) {
true => {
Task { status: Status::Processing, started_at: Some(started_at), ..task }
} else {
task
}
false => task,
})
.collect(),
total,
))
.collect())
}
}
@@ -1856,17 +1835,6 @@ mod tests {
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_third_task");
}
#[test]
fn test_task_is_processing() {
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
index_scheduler.register(index_creation_task("index_a", "id")).unwrap();
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_a_task");
handle.advance_till([Start, BatchCreated]);
assert!(index_scheduler.is_task_processing().unwrap());
}
/// We send a lot of tasks but notify the tasks scheduler only once as
/// we send them very fast, we must make sure that they are all processed.
#[test]
@@ -2799,43 +2767,43 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { limit: Some(0), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query { limit: Some(1), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
let query = Query { limit: Some(2), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]");
let query = Query { from: Some(1), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
let query = Query { from: Some(2), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]");
let query = Query { from: Some(1), limit: Some(1), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,]");
let query = Query { from: Some(1), limit: Some(2), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
@@ -2862,13 +2830,13 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick
let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick
@@ -2877,7 +2845,7 @@ mod tests {
statuses: Some(vec![Status::Enqueued, Status::Processing]),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick
@@ -2887,7 +2855,7 @@ mod tests {
after_started_at: Some(start_time),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// both enqueued and processing tasks in the first tick, but limited to those with a started_at
@@ -2899,7 +2867,7 @@ mod tests {
before_started_at: Some(start_time),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// both enqueued and processing tasks in the first tick, but limited to those with a started_at
@@ -2912,7 +2880,7 @@ mod tests {
before_started_at: Some(start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// both enqueued and processing tasks in the first tick, but limited to those with a started_at
@@ -2939,7 +2907,7 @@ mod tests {
before_started_at: Some(start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// both succeeded and processing tasks in the first tick, but limited to those with a started_at
@@ -2952,7 +2920,7 @@ mod tests {
before_started_at: Some(start_time),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// both succeeded and processing tasks in the first tick, but limited to those with a started_at
@@ -2965,7 +2933,7 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// both succeeded and processing tasks in the first tick, but limited to those with a started_at
@@ -2985,7 +2953,7 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// we run the same query to verify that, and indeed find that the last task is matched
@@ -2997,7 +2965,7 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// enqueued, succeeded, or processing tasks started after the second part of the test, should
@@ -3009,7 +2977,7 @@ mod tests {
// now the last task should have failed
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "end");
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// so running the last query should return nothing
@@ -3021,7 +2989,7 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// but the same query on failed tasks should return the last task
@@ -3033,7 +3001,7 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// but the same query on failed tasks should return the last task
@@ -3046,7 +3014,7 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// same query but with an invalid uid
@@ -3059,7 +3027,7 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// same query but with a valid uid
@@ -3091,14 +3059,14 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// only the first task associated with catto is returned, the indexSwap tasks are excluded!
snapshot!(snapshot_bitmap(&tasks), @"[0,]");
let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
@@ -3112,7 +3080,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query::default();
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
@@ -3126,7 +3094,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[1,]");
let query = Query::default();
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
@@ -3145,7 +3113,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
let query = Query::default();
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// we asked for all the tasks with all index authorized -> all tasks returned
@@ -3178,7 +3146,7 @@ mod tests {
let rtxn = index_scheduler.read_txn().unwrap();
let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
// 0 is not returned because it was not canceled, 3 is not returned because it is the uid of the
@@ -3186,7 +3154,7 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]");
let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
let (tasks, _) = index_scheduler
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
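
The hunks above drop the (tasks, total) pair in favor of returning only the matching tasks; the deleted comment spells out how the total was obtained on the other side: evaluate the same filter twice, once with from and limit cleared to count everything, once as-is for the page. A standalone sketch of that strategy over a plain slice, with illustrative names rather than the scheduler's API:

fn page_with_total<T: Clone>(
    items: &[T],
    matches: impl Fn(&T) -> bool,
    from: Option<usize>,
    limit: Option<usize>,
) -> (Vec<T>, u64) {
    // First pass: the query "without limits", used only for the total.
    let total = items.iter().filter(|t| matches(*t)).count() as u64;
    // Second pass: the user-visible page, honoring `from` and `limit`.
    let page: Vec<T> = items
        .iter()
        .filter(|t| matches(*t))
        .skip(from.unwrap_or(0))
        .take(limit.unwrap_or(usize::MAX))
        .cloned()
        .collect();
    (page, total)
}

fn main() {
    let tasks = vec![1u32, 2, 3, 4, 5];
    let (page, total) = page_with_total(&tasks, |t| t % 2 == 1, Some(1), Some(1));
    assert_eq!((page, total), (vec![3], 3)); // three odd tasks overall, one page entry
}

As the removed comment notes, running the filter twice is slightly inefficient but avoids changing the underlying query implementation.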

View File

@@ -1,36 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
----------------------------------------------------------------------
### Status:
enqueued [0,]
----------------------------------------------------------------------
### Kind:
"indexCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
index_a [0,]
----------------------------------------------------------------------
### Index Mapper:
----------------------------------------------------------------------
### Canceled By:
----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
----------------------------------------------------------------------

View File

@@ -15,7 +15,7 @@ license.workspace = true
serde_json = "1.0"
[dev-dependencies]
criterion = "0.5.1"
criterion = "0.4.0"
[[bench]]
name = "depth"

View File

@@ -199,30 +199,6 @@ macro_rules! snapshot {
};
}
/// Create a string from the value by serializing it as Json, optionally
/// redacting some parts of it.
///
/// The second argument to the macro can be an object expression for redaction.
/// It's in the form { selector => replacement }. For more information about redactions
/// refer to the redactions feature in the `insta` guide.
#[macro_export]
macro_rules! json_string {
($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
{
let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
snap
}
};
($value:expr) => {{
let value = meili_snap::insta::_macro_support::serialize_value(
&$value,
meili_snap::insta::_macro_support::SerializationFormat::Json,
meili_snap::insta::_macro_support::SnapshotLocation::File
);
value
}};
}
#[cfg(test)]
mod tests {
use crate as meili_snap;
@@ -274,3 +250,27 @@ mod tests {
}
}
}
/// Create a string from the value by serializing it as Json, optionally
/// redacting some parts of it.
///
/// The second argument to the macro can be an object expression for redaction.
/// It's in the form { selector => replacement }. For more information about redactions
/// refer to the redactions feature in the `insta` guide.
#[macro_export]
macro_rules! json_string {
($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
{
let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
snap
}
};
($value:expr) => {{
let value = meili_snap::insta::_macro_support::serialize_value(
&$value,
meili_snap::insta::_macro_support::SerializationFormat::Json,
meili_snap::insta::_macro_support::SnapshotLocation::File
);
value
}};
}
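
The doc comment above describes the macro's redaction form, { selector => replacement }. A hypothetical usage sketch (the test, the JSON value, and the .date selector are all illustrative; only the macro and its syntax come from the code above):

#[cfg(test)]
mod doc_example {
    // Assumes a crate that depends on meili_snap and serde_json.
    #[test]
    fn redacts_the_date() {
        let value = serde_json::json!({ "name": "kefir", "date": "2023-07-25" });
        // Serialize the value as JSON, replacing the `.date` field with a
        // stable placeholder, per the insta redactions syntax.
        let snap = meili_snap::json_string!(value, { ".date" => "[date]" });
        assert!(snap.contains("[date]"));
    }
}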

View File

@@ -1,3 +1,4 @@
use std::borrow::Borrow;
use std::fmt::{self, Debug, Display};
use std::fs::File;
use std::io::{self, Seek, Write};
@@ -41,7 +42,7 @@ impl Display for DocumentFormatError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Io(e) => write!(f, "{e}"),
Self::MalformedPayload(me, b) => match me {
Self::MalformedPayload(me, b) => match me.borrow() {
Error::Json(se) => {
let mut message = match se.classify() {
Category::Data => {
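
The added use std::borrow::Borrow and the switch from match me to match me.borrow() suggest the malformed-payload error is now held behind an owning pointer; .borrow() hands the match a plain reference to destructure. A minimal standalone sketch of that pattern, with illustrative types (a Box here; the real field's type is whatever the surrounding code defines):

use std::borrow::Borrow;

enum Error {
    Json(String),
}

fn describe(me: &Box<Error>) -> String {
    // Borrow the inner Error out of the owning pointer so the match arms
    // can destructure it directly.
    let err: &Error = me.borrow();
    match err {
        Error::Json(msg) => format!("malformed JSON payload: {msg}"),
    }
}

fn main() {
    let e = Box::new(Error::Json("expected a value".into()));
    println!("{}", describe(&e));
}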

View File

@@ -259,6 +259,9 @@ InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
InvalidSettingsNonSeparatorTokens , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSeparatorTokens , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDictionary , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
InvalidState , Internal , INTERNAL_SERVER_ERROR ;

View File

@@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default)]
#[serde(rename_all = "camelCase", default)]
pub struct RuntimeTogglableFeatures {
pub score_details: bool,

View File

@@ -171,6 +171,15 @@ pub struct Settings<T> {
#[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
pub stop_words: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
pub non_separator_tokens: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
pub separator_tokens: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
pub dictionary: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
@@ -201,6 +210,9 @@ impl Settings<Checked> {
ranking_rules: Setting::Reset,
stop_words: Setting::Reset,
synonyms: Setting::Reset,
non_separator_tokens: Setting::Reset,
separator_tokens: Setting::Reset,
dictionary: Setting::Reset,
distinct_attribute: Setting::Reset,
typo_tolerance: Setting::Reset,
faceting: Setting::Reset,
@@ -217,6 +229,9 @@ impl Settings<Checked> {
sortable_attributes,
ranking_rules,
stop_words,
non_separator_tokens,
separator_tokens,
dictionary,
synonyms,
distinct_attribute,
typo_tolerance,
@@ -232,6 +247,9 @@ impl Settings<Checked> {
sortable_attributes,
ranking_rules,
stop_words,
non_separator_tokens,
separator_tokens,
dictionary,
synonyms,
distinct_attribute,
typo_tolerance,
@@ -274,6 +292,9 @@ impl Settings<Unchecked> {
ranking_rules: self.ranking_rules,
stop_words: self.stop_words,
synonyms: self.synonyms,
non_separator_tokens: self.non_separator_tokens,
separator_tokens: self.separator_tokens,
dictionary: self.dictionary,
distinct_attribute: self.distinct_attribute,
typo_tolerance: self.typo_tolerance,
faceting: self.faceting,
@@ -335,6 +356,28 @@ pub fn apply_settings_to_builder(
Setting::NotSet => (),
}
match settings.non_separator_tokens {
Setting::Set(ref non_separator_tokens) => {
builder.set_non_separator_tokens(non_separator_tokens.clone())
}
Setting::Reset => builder.reset_non_separator_tokens(),
Setting::NotSet => (),
}
match settings.separator_tokens {
Setting::Set(ref separator_tokens) => {
builder.set_separator_tokens(separator_tokens.clone())
}
Setting::Reset => builder.reset_separator_tokens(),
Setting::NotSet => (),
}
match settings.dictionary {
Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
Setting::Reset => builder.reset_dictionary(),
Setting::NotSet => (),
}
match settings.synonyms {
Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
Setting::Reset => builder.reset_synonyms(),
@@ -459,6 +502,11 @@ pub fn settings(
})
.transpose()?
.unwrap_or_default();
let non_separator_tokens = index.non_separator_tokens(rtxn)?.unwrap_or_default();
let separator_tokens = index.separator_tokens(rtxn)?.unwrap_or_default();
let dictionary = index.dictionary(rtxn)?.unwrap_or_default();
let distinct_field = index.distinct_field(rtxn)?.map(String::from);
// in milli, each word in the synonyms map was split on its separator. Since we lost
@@ -520,6 +568,9 @@ pub fn settings(
sortable_attributes: Setting::Set(sortable_attributes),
ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
stop_words: Setting::Set(stop_words),
non_separator_tokens: Setting::Set(non_separator_tokens),
separator_tokens: Setting::Set(separator_tokens),
dictionary: Setting::Set(dictionary),
distinct_attribute: match distinct_field {
Some(field) => Setting::Set(field),
None => Setting::Reset,
@@ -642,6 +693,9 @@ pub(crate) mod test {
sortable_attributes: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
separator_tokens: Setting::NotSet,
dictionary: Setting::NotSet,
synonyms: Setting::NotSet,
distinct_attribute: Setting::NotSet,
typo_tolerance: Setting::NotSet,
@@ -663,6 +717,9 @@ pub(crate) mod test {
sortable_attributes: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
separator_tokens: Setting::NotSet,
dictionary: Setting::NotSet,
synonyms: Setting::NotSet,
distinct_attribute: Setting::NotSet,
typo_tolerance: Setting::NotSet,
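
Every one of the `Setting` match blocks in `apply_settings_to_builder` above follows the same three-way shape; a minimal standalone sketch, with a simplified stand-in builder:

enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

struct IndexBuilder { dictionary: Option<Vec<String>> }

impl IndexBuilder {
    fn apply_dictionary(&mut self, setting: Setting<Vec<String>>) {
        match setting {
            // overwrite the stored value
            Setting::Set(words) => self.dictionary = Some(words),
            // clear it back to the engine default
            Setting::Reset => self.dictionary = None,
            // the field was absent from the payload: leave it untouched
            Setting::NotSet => (),
        }
    }
}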

View File

@@ -19,7 +19,6 @@ actix-http = { version = "3.3.1", default-features = false, features = [
"compress-gzip",
"rustls",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.3.1", default-features = false, features = [
"macros",
"compress-brotli",
@@ -51,14 +50,13 @@ futures-util = "0.3.28"
http = "0.2.9"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "1.9.3", features = ["serde-1"] }
is-terminal = "0.4.8"
itertools = "0.10.5"
jsonwebtoken = "8.3.0"
lazy_static = "1.4.0"
log = "0.4.17"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.37", default-features = false }
mimalloc = { version = "0.1.36", default-features = false }
mime = "0.3.17"
num_cpus = "1.15.0"
obkv = "0.2.0"
@@ -104,6 +102,8 @@ uuid = { version = "1.3.1", features = ["serde", "v4"] }
walkdir = "2.3.3"
yaup = "0.2.1"
serde_urlencoded = "0.7.1"
actix-utils = "3.0.1"
atty = "0.2.14"
termcolor = "1.2.0"
[dev-dependencies]
@@ -154,5 +154,5 @@ thai = ["meilisearch-types/thai"]
greek = ["meilisearch-types/greek"]
[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.11/build.zip"
sha1 = "83cd44ed1e5f97ecb581dc9f958a63f4ccc982d9"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.7/build.zip"
sha1 = "28b45bf772c84f9a6e16bc1689b393bfce8da7d6"

View File

@@ -574,10 +574,6 @@ pub struct SearchAggregator {
filter_total_number_of_criteria: usize,
used_syntax: HashMap<String, usize>,
// attributes_to_search_on
// incremented every time a search is done using attributes_to_search_on
attributes_to_search_on_total_number_of_uses: usize,
// q
// The maximum number of terms in a q request
max_terms_number: usize,
@@ -651,11 +647,6 @@ impl SearchAggregator {
ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
}
// attributes_to_search_on
if let Some(_) = query.attributes_to_search_on {
ret.attributes_to_search_on_total_number_of_uses = 1;
}
if let Some(ref q) = query.q {
ret.max_terms_number = q.split_whitespace().count();
}
@@ -729,18 +720,9 @@ impl SearchAggregator {
let used_syntax = self.used_syntax.entry(key).or_insert(0);
*used_syntax = used_syntax.saturating_add(value);
}
// attributes_to_search_on
self.attributes_to_search_on_total_number_of_uses = self
.attributes_to_search_on_total_number_of_uses
.saturating_add(other.attributes_to_search_on_total_number_of_uses);
// q
self.max_terms_number = self.max_terms_number.max(other.max_terms_number);
// vector
self.max_vector_size = self.max_vector_size.max(other.max_vector_size);
// pagination
self.max_limit = self.max_limit.max(other.max_limit);
self.max_offset = self.max_offset.max(other.max_offset);
@@ -779,17 +761,17 @@ impl SearchAggregator {
if self.total_received == 0 {
None
} else {
// the index of the 99th percentile value
let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
// we get all the values in a sorted manner
let time_spent = self.time_spent.into_sorted_vec();
// the index of the 99th percentile value
let percentile_99th = time_spent.len() * 99 / 100;
// We are only interested in the slowest value among the 99% fastest results
let time_spent = time_spent.get(percentile_99th);
let time_spent = time_spent.get(percentile_99th as usize);
let properties = json!({
"user-agent": self.user_agents,
"requests": {
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
"total_succeeded": self.total_succeeded,
"total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panics
"total_received": self.total_received,
@@ -804,15 +786,9 @@ impl SearchAggregator {
"avg_criteria_number": format!("{:.2}", self.filter_sum_of_criteria_terms as f64 / self.filter_total_number_of_criteria as f64),
"most_used_syntax": self.used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
},
"attributes_to_search_on": {
"total_number_of_uses": self.attributes_to_search_on_total_number_of_uses,
},
"q": {
"max_terms_number": self.max_terms_number,
},
"vector": {
"max_vector_size": self.max_vector_size,
},
"pagination": {
"max_limit": self.max_limit,
"max_offset": self.max_offset,
@@ -867,10 +843,6 @@ pub struct MultiSearchAggregator {
// sum of the number of search queries in the requests, used with total_received to compute an average
total_search_count: usize,
// scoring
show_ranking_score: bool,
show_ranking_score_details: bool,
// context
user_agents: HashSet<String>,
}
@@ -884,9 +856,6 @@ impl MultiSearchAggregator {
let distinct_indexes: HashSet<_> =
query.iter().map(|query| query.index_uid.as_str()).collect();
let show_ranking_score = query.iter().any(|query| query.show_ranking_score);
let show_ranking_score_details = query.iter().any(|query| query.show_ranking_score_details);
Self {
timestamp,
total_received: 1,
@@ -894,8 +863,6 @@ impl MultiSearchAggregator {
total_distinct_index_count: distinct_indexes.len(),
total_single_index: if distinct_indexes.len() == 1 { 1 } else { 0 },
total_search_count: query.len(),
show_ranking_score,
show_ranking_score_details,
user_agents,
}
}
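
The scoring flags above follow a simple aggregation rule: set if any query in the batch used the feature, and OR-ed together when two aggregators merge. A minimal sketch under simplified types:

#[derive(Default)]
struct ScoringAgg { show_ranking_score: bool }

impl ScoringAgg {
    fn from_queries(flags: &[bool]) -> Self {
        // flagged if any query in the multi-search asked for ranking scores
        ScoringAgg { show_ranking_score: flags.iter().any(|&f| f) }
    }

    fn merge(self, other: ScoringAgg) -> ScoringAgg {
        // merging two aggregators ORs the flags
        ScoringAgg { show_ranking_score: self.show_ranking_score || other.show_ranking_score }
    }
}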
@@ -917,9 +884,6 @@ impl MultiSearchAggregator {
this.total_distinct_index_count.saturating_add(other.total_distinct_index_count);
let total_single_index = this.total_single_index.saturating_add(other.total_single_index);
let total_search_count = this.total_search_count.saturating_add(other.total_search_count);
let show_ranking_score = this.show_ranking_score || other.show_ranking_score;
let show_ranking_score_details =
this.show_ranking_score_details || other.show_ranking_score_details;
let mut user_agents = this.user_agents;
for user_agent in other.user_agents.into_iter() {
@@ -935,8 +899,6 @@ impl MultiSearchAggregator {
total_single_index,
total_search_count,
user_agents,
show_ranking_score,
show_ranking_score_details,
// do not add _ or ..Default::default() here
};
@@ -963,10 +925,6 @@ impl MultiSearchAggregator {
"searches": {
"total_search_count": self.total_search_count,
"avg_search_count": (self.total_search_count as f64) / (self.total_received as f64),
},
"scoring": {
"show_ranking_score": self.show_ranking_score,
"show_ranking_score_details": self.show_ranking_score_details,
}
});
@@ -1187,7 +1145,6 @@ pub struct DocumentsDeletionAggregator {
#[serde(rename = "user-agent")]
user_agents: HashSet<String>,
#[serde(rename = "requests.total_received")]
total_received: usize,
per_document_id: bool,
clear_all: bool,
@@ -1338,7 +1295,6 @@ pub struct HealthAggregator {
#[serde(rename = "user-agent")]
user_agents: HashSet<String>,
#[serde(rename = "requests.total_received")]
total_received: usize,
}
@@ -1389,7 +1345,7 @@ pub struct DocumentsFetchAggregator {
#[serde(rename = "user-agent")]
user_agents: HashSet<String>,
#[serde(rename = "requests.total_received")]
#[serde(rename = "requests.max_limit")]
total_received: usize,
// a call on ../documents/:doc_id

View File

@@ -1,5 +1,5 @@
use std::env;
use std::io::{stderr, Write};
use std::io::Write;
use std::path::PathBuf;
use std::sync::Arc;
@@ -7,7 +7,6 @@ use actix_web::http::KeepAlive;
use actix_web::web::Data;
use actix_web::HttpServer;
use index_scheduler::IndexScheduler;
use is_terminal::IsTerminal;
use meilisearch::analytics::Analytics;
use meilisearch::{analytics, create_app, prototype_name, setup_meilisearch, Opt};
use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};
@@ -191,7 +190,7 @@ Anonymous telemetry:\t\"Enabled\""
}
eprintln!();
eprintln!("Check out Meilisearch Cloud!\thttps://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=engine&utm_medium=cli");
eprintln!("Check out Meilisearch Cloud!\thttps://cloud.meilisearch.com/login?utm_campaign=oss&utm_source=engine&utm_medium=cli");
eprintln!("Documentation:\t\t\thttps://www.meilisearch.com/docs");
eprintln!("Source code:\t\t\thttps://github.com/meilisearch/meilisearch");
eprintln!("Discord:\t\t\thttps://discord.meilisearch.com");
@@ -202,7 +201,8 @@ const WARNING_BG_COLOR: Option<Color> = Some(Color::Ansi256(178));
const WARNING_FG_COLOR: Option<Color> = Some(Color::Ansi256(0));
fn print_master_key_too_short_warning() {
let choice = if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never };
let choice =
if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
let mut stderr = StandardStream::stderr(choice);
stderr
.set_color(
@@ -227,7 +227,8 @@ fn print_master_key_too_short_warning() {
}
fn print_missing_master_key_warning() {
let choice = if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never };
let choice =
if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
let mut stderr = StandardStream::stderr(choice);
stderr
.set_color(
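
Both variants of the warning helpers above derive a `ColorChoice` from whether stderr is a terminal; an equivalent sketch using the standard library's `IsTerminal` trait (stable since Rust 1.70) instead of the `atty` or `is-terminal` crates:

use std::io::{stderr, IsTerminal};
use termcolor::ColorChoice;

fn stderr_color_choice() -> ColorChoice {
    // colorize only when stderr is attached to a real terminal
    if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never }
}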

View File

@@ -50,10 +50,4 @@ lazy_static! {
&["kind", "value"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_LAST_UPDATE: IntGauge =
register_int_gauge!(opts!("meilisearch_last_update", "Meilisearch Last Update"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_IS_INDEXING: IntGauge =
register_int_gauge!(opts!("meilisearch_is_indexing", "Meilisearch Is Indexing"))
.expect("Can't create a metric");
}
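
For context, the two gauges above are fed from the `/metrics` route (its diff appears later in this section); a minimal sketch of the update side, mirroring those calls:

fn update_index_gauges(last_update_unix: i64, is_indexing: bool) {
    crate::metrics::MEILISEARCH_LAST_UPDATE.set(last_update_unix);
    crate::metrics::MEILISEARCH_IS_INDEXING.set(is_indexing as i64);
}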

View File

@@ -64,20 +64,7 @@ async fn patch_features(
vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
};
// explicitly destructure for analytics rather than using the `Serialize` implementation, because
// it renames the fields to camelCase, which we don't want for analytics.
// **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
let meilisearch_types::features::RuntimeTogglableFeatures { score_details, vector_store } =
new_features;
analytics.publish(
"Experimental features Updated".to_string(),
json!({
"score_details": score_details,
"vector_store": vector_store,
}),
Some(&req),
);
analytics.publish("Experimental features Updated".to_string(), json!(new_features), Some(&req));
index_scheduler.put_runtime_features(new_features)?;
Ok(HttpResponse::Ok().json(new_features))
}
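
The destructuring variant above is deliberate: naming every field without a `..` wildcard means that adding a field to `RuntimeTogglableFeatures` breaks this code at compile time, so the analytics payload cannot silently fall out of date. A minimal sketch of the trick:

struct RuntimeTogglableFeatures { score_details: bool, vector_store: bool }

fn analytics_payload(features: RuntimeTogglableFeatures) -> serde_json::Value {
    // no `..` on purpose: a new struct field makes this line stop compiling
    let RuntimeTogglableFeatures { score_details, vector_store } = features;
    serde_json::json!({
        "score_details": score_details,
        "vector_store": vector_store,
    })
}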

View File

@@ -35,7 +35,7 @@ pub struct SearchQueryGet {
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
q: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchVector>)]
vector: Option<CS<f32>>,
vector: Option<Vec<f32>>,
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
offset: Param<usize>,
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]
@@ -88,7 +88,7 @@ impl From<SearchQueryGet> for SearchQuery {
Self {
q: other.q,
vector: other.vector.map(CS::into_inner),
vector: other.vector,
offset: other.offset.0,
limit: other.limit.0,
page: other.page.as_deref().copied(),
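
GET parameters arrive as a single string, so the `CS<f32>` wrapper above deserializes a comma-separated list before `into_inner` unwraps it to a `Vec<f32>`; a hypothetical minimal equivalent of the parsing step (not the actual implementation):

fn parse_comma_separated_f32(raw: &str) -> Result<Vec<f32>, std::num::ParseFloatError> {
    // "0.1,0.2,0.3" -> vec![0.1, 0.2, 0.3]
    raw.split(',').map(|part| part.trim().parse::<f32>()).collect()
}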

View File

@@ -5,7 +5,6 @@ use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::facet_values_sort::FacetValuesSort;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;
@@ -310,6 +309,81 @@ make_setting_route!(
}
);
make_setting_route!(
"/non-separator-tokens",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsNonSeparatorTokens,
>,
non_separator_tokens,
"nonSeparatorTokens",
analytics,
|non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
use serde_json::json;
analytics.publish(
"nonSeparatorTokens Updated".to_string(),
json!({
"non_separator_tokens": {
"total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
},
}),
Some(req),
);
}
);
make_setting_route!(
"/separator-tokens",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsSeparatorTokens,
>,
separator_tokens,
"separatorTokens",
analytics,
|separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
use serde_json::json;
analytics.publish(
"separatorTokens Updated".to_string(),
json!({
"separator_tokens": {
"total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
},
}),
Some(req),
);
}
);
make_setting_route!(
"/dictionary",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsDictionary,
>,
dictionary,
"dictionary",
analytics,
|dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
use serde_json::json;
analytics.publish(
"dictionary Updated".to_string(),
json!({
"dictionary": {
"total": dictionary.as_ref().map(|dictionary| dictionary.len()),
},
}),
Some(req),
);
}
);
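
Each of the three new routes above publishes the same shape of analytics event; a sketch of the payload builder for `/dictionary`, with the structure taken from the `json!` call in the diff:

use std::collections::BTreeSet;

fn dictionary_event(dictionary: Option<&BTreeSet<String>>) -> serde_json::Value {
    // `total` is the number of entries in the new dictionary, when present
    serde_json::json!({
        "dictionary": {
            "total": dictionary.map(|d| d.len()),
        },
    })
}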
make_setting_route!(
"/synonyms",
put,
@@ -551,16 +625,10 @@ pub async fn update_all(
.as_ref()
.set()
.and_then(|s| s.max_values_per_facet.as_ref().set()),
"sort_facet_values_by_star_count": new_settings.faceting
"sort_facet_values_by": new_settings.faceting
.as_ref()
.set()
.and_then(|s| {
s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
}),
"sort_facet_values_by_total": new_settings.faceting
.as_ref()
.set()
.and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
.and_then(|s| s.sort_facet_values_by.as_ref().set()),
},
"pagination": {
"max_total_hits": new_settings.pagination

View File

@@ -49,11 +49,6 @@ pub async fn get_metrics(
}
}
if let Some(last_update) = response.last_update {
crate::metrics::MEILISEARCH_LAST_UPDATE.set(last_update.unix_timestamp());
}
crate::metrics::MEILISEARCH_IS_INDEXING.set(index_scheduler.is_task_processing()? as i64);
let encoder = TextEncoder::new();
let mut buffer = vec![];
encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");

View File

@@ -284,6 +284,9 @@ pub fn create_all_stats(
used_database_size += index_scheduler.used_size()?;
database_size += auth_controller.size()?;
used_database_size += auth_controller.used_size()?;
let update_file_size = index_scheduler.compute_update_file_size()?;
database_size += update_file_size;
used_database_size += update_file_size;
let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
Ok(stats)

View File

@@ -325,7 +325,7 @@ async fn cancel_tasks(
let query = params.into_query();
let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
&index_scheduler.read_txn()?,
&query,
index_scheduler.filters(),
@@ -370,7 +370,7 @@ async fn delete_tasks(
);
let query = params.into_query();
let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
&index_scheduler.read_txn()?,
&query,
index_scheduler.filters(),
@@ -387,7 +387,6 @@ async fn delete_tasks(
#[derive(Debug, Serialize)]
pub struct AllTasks {
results: Vec<TaskView>,
total: u64,
limit: u32,
from: Option<u32>,
next: Option<u32>,
@@ -407,17 +406,23 @@ async fn get_tasks(
let limit = params.limit.0;
let query = params.into_query();
let filters = index_scheduler.filters();
let (tasks, total) = index_scheduler.get_tasks_from_authorized_indexes(query, filters)?;
let mut results: Vec<_> = tasks.iter().map(TaskView::from_task).collect();
let mut tasks_results: Vec<TaskView> = index_scheduler
.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?
.into_iter()
.map(|t| TaskView::from_task(&t))
.collect();
// If we were able to fetch one task more than the number we asked for,
// it means that there are more to come.
let next = if results.len() == limit as usize { results.pop().map(|t| t.uid) } else { None };
let next = if tasks_results.len() == limit as usize {
tasks_results.pop().map(|t| t.uid)
} else {
None
};
let from = results.first().map(|t| t.uid);
let tasks = AllTasks { results, limit: limit.saturating_sub(1), total, from, next };
let from = tasks_results.first().map(|t| t.uid);
let tasks = AllTasks { results: tasks_results, limit: limit.saturating_sub(1), from, next };
Ok(HttpResponse::Ok().json(tasks))
}
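
Both versions of the handler above rely on the same peek-ahead trick: query one task more than the limit that will be reported. A standalone sketch:

fn page_tasks(mut fetched: Vec<u32>, query_limit: usize) -> (Vec<u32>, Option<u32>) {
    // a full `query_limit` rows means the last one is the peek-ahead item:
    // pop it and use it as the `next` cursor instead of returning it
    let next = if fetched.len() == query_limit { fetched.pop() } else { None };
    (fetched, next)
}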
@@ -439,10 +444,10 @@ async fn get_task(
analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req));
let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() };
let filters = index_scheduler.filters();
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(query, filters)?;
if let Some(task) = tasks.first() {
if let Some(task) =
index_scheduler.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?.first()
{
let task_view = TaskView::from_task(task);
Ok(HttpResponse::Ok().json(task_view))
} else {

View File

@@ -491,6 +491,20 @@ pub fn perform_search(
tokenizer_builder.allow_list(&script_lang_map);
}
let separators = index.allowed_separators(&rtxn)?;
let separators: Option<Vec<_>> =
separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
if let Some(ref separators) = separators {
tokenizer_builder.separators(separators);
}
let dictionary = index.dictionary(&rtxn)?;
let dictionary: Option<Vec<_>> =
dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
if let Some(ref dictionary) = dictionary {
tokenizer_builder.words_dict(dictionary);
}
let mut formatter_builder = MatcherBuilder::new(matching_words, tokenizer_builder.build());
formatter_builder.crop_marker(query.crop_marker);
formatter_builder.highlight_prefix(query.highlight_pre_tag);
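
The two conversions above exist because the tokenizer builder takes `&[&str]` while the index returns owned `String`s; the shared pattern as a standalone sketch:

fn as_str_slices(owned: &Option<Vec<String>>) -> Option<Vec<&str>> {
    // borrow each String as &str so a &[&str] can be passed on without cloning
    owned.as_ref().map(|strings| strings.iter().map(String::as_str).collect())
}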

View File

@@ -61,8 +61,6 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},
("POST", "/keys") => hashset!{"keys.create", "*"},
("GET", "/keys") => hashset!{"keys.get", "*"},
("GET", "/experimental-features") => hashset!{"experimental.get", "*"},
("PATCH", "/experimental-features") => hashset!{"experimental.update", "*"},
};
authorizations

View File

@@ -189,14 +189,6 @@ impl Server {
let url = format!("/tasks/{}", update_id);
self.service.get(url).await
}
pub async fn get_features(&self) -> (Value, StatusCode) {
self.service.get("/experimental-features").await
}
pub async fn set_features(&self, value: Value) -> (Value, StatusCode) {
self.service.patch("/experimental-features", value).await
}
}
pub fn default_settings(dir: impl AsRef<Path>) -> Opt {

File diff suppressed because it is too large.

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:08:54.713227Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:25.351927Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:25.351927Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -40,7 +40,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:48.1719Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -40,7 +40,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:48.1719Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -40,7 +40,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:48.1719Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -40,7 +40,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:48.1719Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -40,7 +40,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:34:48.1719Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -45,7 +45,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:26:57.319083Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:28:46.369971Z"
}
],
"total": 92,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:28:46.369971Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:28:46.369971Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:28:46.369971Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:28:46.369971Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T09:28:46.369971Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:21:54.691484Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:21:54.691484Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -41,7 +41,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:40:28.669652Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 92,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:21:54.691484Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:21:54.691484Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -36,7 +36,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:24:39.812922Z"
}
],
"total": 2,
"limit": 1,
"from": 1,
"next": 0

View File

@@ -41,7 +41,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:40:28.669652Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 92,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:51:53.095314Z"
}
],
"total": 93,
"limit": 1,
"from": 92,
"next": 91

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

View File

@@ -20,7 +20,6 @@ source: meilisearch/tests/dumps/mod.rs
"finishedAt": "2021-09-08T08:31:12.304168Z"
}
],
"total": 1,
"limit": 1,
"from": 0,
"next": null

Some files were not shown because too many files have changed in this diff.