Compare commits

88 Commits

Author SHA1 Message Date
9640c2aaa6 Merge pull request #349 from meilisearch/bump-version
Bump meilisearch crates to v0.8.2
2019-11-28 17:23:40 +01:00
9a2b4d08e1 Bump meilisearch crates to v0.8.2 2019-11-28 17:15:13 +01:00
e91615fe59 Merge pull request #348 from meilisearch/replace-isahc-by-ureq
Replace isahc by ureq
2019-11-28 17:14:32 +01:00
aed02b2e19 Remove many dependencies from the Dockerfile 2019-11-28 17:04:01 +01:00
83ad80d9db Replace isahc by ureq 2019-11-28 16:41:42 +01:00
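For context: isahc is built on libcurl, while ureq is a small, blocking HTTP client built on rustls, which is what lets the Dockerfile above drop its native build dependencies. A minimal sketch of the kind of call being swapped in, assuming the ureq 0.11 API pinned in the lockfile below; the URL and handling are illustrative, not code from this PR:

fn main() {
    // ureq 0.11: call() performs the request synchronously and returns a Response.
    let response = ureq::get("https://example.com/health").call();
    if response.ok() {
        // into_string() reads the whole body as a UTF-8 string.
        println!("{}", response.into_string().unwrap());
    } else {
        eprintln!("request failed with status {}", response.status());
    }
}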
abdb7793fb Merge pull request #345 from tpayet/readme_changes
Clarification of readme file
2019-11-28 16:35:44 +01:00
387eb3fde3 Clarification of readme file 2019-11-28 16:28:25 +01:00
e640bc90b4 Merge pull request #343 from meilisearch/explicit-index-clear
Change the update loop to be more explicit on index clear
2019-11-28 14:48:37 +01:00
3978378152 Merge pull request #344 from tpayet/patch-1
Update README license badge
2019-11-28 14:35:50 +01:00
61e3e4f0b9 Update README license badge 2019-11-28 14:28:30 +01:00
1def56ea11 Change the update loop to be more explicit on index clear 2019-11-27 13:43:28 +01:00
6d686ac14f Merge pull request #342 from meilisearch/update-lock
Update the lock file
2019-11-27 12:49:47 +01:00
641e0d15f5 Make sure the lock file is up to date 2019-11-27 12:06:14 +01:00
71b39426c0 Update the lock file 2019-11-27 12:01:22 +01:00
57584eaccc Merge pull request #341 from meilisearch/bump-version
Bump meilisearch crates to v0.8.1
2019-11-27 11:54:39 +01:00
f6fb31c531 Bump meilisearch crates to v0.8.1 2019-11-27 11:47:27 +01:00
0cea8ce5b5 Merge pull request #340 from meilisearch/separate-updates-kvstore
Separate the update and main databases
2019-11-27 11:39:14 +01:00
d08b76a323 Separate the update and main databases
We used the heed typed transaction to make it safe (https://github.com/Kerollmops/heed/pull/27).
2019-11-27 11:29:06 +01:00
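heed's typed transactions tie each transaction to the environment that opened it, and each database to fixed key and value codecs, so the new update store cannot accidentally be written through the main store's handles. A minimal sketch of typed heed usage, assuming the heed 0.6 API from the lockfile below; the path, database, and value type are illustrative, not this PR's code:

use heed::types::{OwnedType, Str};
use heed::{Database, EnvOpenOptions};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    std::fs::create_dir_all("main.mdb")?;
    let env = EnvOpenOptions::new().open("main.mdb")?;

    // The key and value codecs are fixed when the database is created...
    let db: Database<Str, OwnedType<u64>> = env.create_database(None)?;

    // ...and this write transaction is opened on `env`, so it cannot be
    // mixed up with a transaction belonging to another environment.
    let mut wtxn = env.write_txn()?;
    db.put(&mut wtxn, "last-update-id", &42)?;
    wtxn.commit()?;
    Ok(())
}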
86a87d6032 Merge pull request #339 from tpayet/action-docker-tag
Update action workflow for docker tagged image
2019-11-26 19:17:19 +01:00
e534929f80 Update action workflow for docker tagged image 2019-11-26 18:18:51 +01:00
fcc154da1c Merge pull request #336 from meilisearch/rename-to-meilisearch
Rename MeiliDB into MeiliSearch
2019-11-26 14:06:01 +01:00
00d1200704 Rename the meilisearch-http binary into meilisearch 2019-11-26 11:17:30 +01:00
7cc096e0a2 Rename MeiliDB into MeiliSearch 2019-11-26 11:12:30 +01:00
58eaf78dc4 Merge pull request #335 from tpayet/github-release-action
GitHub release action
2019-11-25 19:19:08 +01:00
3be2281483 Update workflows README 2019-11-25 18:14:21 +01:00
cc06d96993 Add gh actions to release binaries 2019-11-25 17:27:15 +01:00
93c7e700bc Merge pull request #333 from tpayet/update-dockerfile
Add meilihttp_addr env variable in docker build
2019-11-25 16:41:52 +01:00
97c6757fc7 Add meilihttp_addr env variable in docker build 2019-11-25 16:30:07 +01:00
276d3f8e22 Merge pull request #332 from meilisearch/jemalloc-only-on-linux
Make jemalloc only used on linux
2019-11-25 16:13:54 +01:00
4869a88ae2 Make jemalloc only used on linux 2019-11-25 15:35:13 +01:00
ae88bc31bc Merge pull request #331 from meilisearch/enable-jemalloc-linux-only
Enable jemalloc only on linux OSs
2019-11-25 14:59:56 +01:00
8aed1d96c5 Enable jemalloc only on linux OSs 2019-11-25 14:51:47 +01:00
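Both commits implement the same well-known pattern: a cfg-gated global allocator, with jemallocator declared as a Linux-only dependency. A minimal sketch of that pattern (the assumed shape, not necessarily this project's exact code):

// Assumed Cargo.toml side:
// [target.'cfg(target_os = "linux")'.dependencies]
// jemallocator = "0.3"

// The override only compiles on Linux; other OSs keep the system allocator.
#[cfg(target_os = "linux")]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;

fn main() {
    let v: Vec<u32> = (0..4).collect(); // served by jemalloc on Linux only
    println!("{:?}", v);
}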
c93949474c Merge pull request #330 from tpayet/fix-actions-badge-link
Update action badge link
2019-11-25 13:51:07 +01:00
8cf19f1c6b Update action badge link 2019-11-25 13:44:20 +01:00
a82ecb3cef Merge pull request #324 from tpayet/gh-actions
Replace Azure CI by Github Actions
2019-11-25 13:31:15 +01:00
04c2b37d82 Remove Azure CI
Add gh actions for cargo check using rust nightly

Add readme about actions workflows

Add basic Dockerfile

Add action workflow for docker publish

Change check action to test action

Update workflow readme without rust nightly

Rename test action file

Add gh actions to push latest docker image from master

Update github action for publish docker image

Add 2 steps dockerfile based on alpine

Update readme badges to match new CI
2019-11-25 13:20:54 +01:00
ab3e8d6537 Merge pull request #314 from meilisearch/fix-number-ord
Fix the ordering functions of the Number type
2019-11-22 15:14:05 +01:00
fd185a5e6b Add a test for the SortByAttr criterion 2019-11-22 15:04:23 +01:00
d9678f0040 Fix the ordering functions of the Number type 2019-11-22 14:44:02 +01:00
840217b111 Merge pull request #321 from meilisearch/fix-create-index
Fix index creation
2019-11-22 14:10:05 +01:00
9605a2cd88 Make it possible to use a custom uid and simplify the usage 2019-11-22 14:01:00 +01:00
0f86ccc035 Index UID generation makes sure to not generate the same number 2019-11-22 14:01:00 +01:00
b3b73e2276 Merge pull request #323 from meilisearch/fix-index-deletion
Fix index deletion once again
2019-11-22 14:00:19 +01:00
f241c999ad Make the CI use rust stable 2019-11-22 13:47:29 +01:00
d4d2a2303a Fix a typo on timeout_ms used for multi index search 2019-11-22 13:47:29 +01:00
c8832409ad Fix the dead lock on index deletion once again 2019-11-22 13:47:29 +01:00
98f76aa952 Merge pull request #320 from meilisearch/send-amplitude-events
Add an Amplitude analysis loop tick
2019-11-22 10:52:29 +01:00
4236632af6 Add an amplitude analysis loop tick 2019-11-21 20:28:58 +01:00
e2c98244ec Merge pull request #313 from meilisearch/fix-dead-lock
Fix dead locks when deleting indexes
2019-11-21 12:42:40 +01:00
c1cf67c008 Join updates threads after dropping the indexes lock and avoid deadlocks 2019-11-21 12:01:46 +01:00
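The shape of this fix is the standard one: while holding the lock, only take the update thread's join handle out of the shared map, release the lock, and join afterwards. A minimal sketch under that assumption (types and names are illustrative, not this project's code):

use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::thread::JoinHandle;

type Indexes = Arc<RwLock<HashMap<String, JoinHandle<()>>>>;

fn delete_index(indexes: &Indexes, uid: &str) {
    // Take the handle out while holding the write lock...
    let handle = {
        let mut guard = indexes.write().unwrap();
        guard.remove(uid)
    }; // ...the guard is dropped here, releasing the lock.

    // Joining after the lock is released cannot deadlock with an update
    // thread that is itself waiting to read `indexes`.
    if let Some(handle) = handle {
        handle.join().unwrap();
    }
}

fn main() {
    let indexes: Indexes = Arc::new(RwLock::new(HashMap::new()));
    let reader = indexes.clone();
    let handle = std::thread::spawn(move || {
        let _count = reader.read().unwrap().len();
    });
    indexes.write().unwrap().insert("movies".into(), handle);
    delete_index(&indexes, "movies");
}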
4abea919b2 Merge pull request #311 from meilisearch/add-index-name-and-id
Add index name and change some routes request body & response
2019-11-21 11:59:14 +01:00
d60aa722c0 Allow to update expireAt and revoked on token 2019-11-21 11:49:49 +01:00
055368acd8 Fix for review 2019-11-21 11:49:49 +01:00
7f2e5d091a Rename routes /synonym to /synonyms 2019-11-20 15:33:42 +01:00
c69ae8154f Allow receiving schema updates formatted as a SchemaBuilder 2019-11-20 15:25:34 +01:00
cd95b243bb Add the update index route 2019-11-20 15:00:06 +01:00
1f1cb1f501 Rename browse_documents into get_all_documents and always respond HTTP Ok 2019-11-20 14:18:21 +01:00
530738cfe9 Format code 2019-11-20 14:12:12 +01:00
878dd6912e Return an HTTP 401 instead of 404 if the token is not found 2019-11-20 14:06:56 +01:00
5f0f699f37 Move route to clear all synonyms on DELETE /synonyms 2019-11-20 14:03:55 +01:00
ca13900699 Async routes now return an ACCEPTED status code response 2019-11-20 14:03:19 +01:00
cc97889b37 The add stop-word route now uses the PATCH method 2019-11-20 13:56:43 +01:00
45ded0498b Format code with cargo fmt 2019-11-20 11:45:23 +01:00
d01a3944c1 Add last_update information on global /stats route 2019-11-20 11:45:22 +01:00
a0caf0d6d7 Remove unused result response on indexes_uids function 2019-11-20 11:45:22 +01:00
e22debb994 Update index updated_at information at each update callback 2019-11-20 11:45:22 +01:00
1b8df0ed8b Remove last_update from stats 2019-11-20 11:45:22 +01:00
3286a5213c Move fields frequency from common store to index main store 2019-11-20 11:45:22 +01:00
394976d330 Update list_index route to return all index information, not only the list of uids 2019-11-20 11:45:22 +01:00
b95acbece0 Function generate_uid now returns a lowercased uid 2019-11-20 11:45:22 +01:00
c94f4dff71 Do not return update_id on IndexCreateResponse if it's none 2019-11-20 11:45:22 +01:00
e6465f4ea1 Create a new specific route for schema 2019-11-20 11:45:22 +01:00
2b3c91aabd Update get_index_schema to allow raw response 2019-11-20 11:45:22 +01:00
e97e13ce9f Rename index_name to index_uids 2019-11-20 11:45:22 +01:00
39e2b73718 Add updatedAt on main index store 2019-11-20 11:45:22 +01:00
a90facaa41 Rename index_name to index_uid 2019-11-20 11:45:22 +01:00
5527457655 Rewrite create_index route new path, body request and response 2019-11-20 11:45:21 +01:00
076e781810 Add name, created_at and updated_at information into the main index 2019-11-20 11:45:21 +01:00
750d336018 Bump Cargo.lock meili versions 2019-11-20 11:45:21 +01:00
e8251ad45b Merge pull request #310 from meilisearch/unify-crates-version
Unify the crates versions to 0.8.0
2019-11-20 11:05:54 +01:00
963ca1e2c7 Unify the crates versions to 0.8.0 2019-11-20 10:47:32 +01:00
12a6c7d54d Merge pull request #298 from bidoubiwa/add_ranked_movies_dataset
Create a dataset where the release_date is a numeric timestamp
2019-11-20 10:46:24 +01:00
2d0fc3f9d3 Create a dataset where the release_date is a numeric timestamp 2019-11-20 10:44:32 +01:00
e554784527 Merge pull request #309 from bidoubiwa/remove_stop_words_from_settings
Removed stop words from settings route
2019-11-19 18:35:27 +01:00
2cb43fa638 Removed stop words from settings route 2019-11-19 18:21:44 +01:00
66d5309a51 Merge pull request #308 from meilisearch/improve-structopt
Introduce better argument names
2019-11-19 18:09:44 +01:00
7eeedec7eb Bump meilidb-http to v0.3.0 2019-11-19 17:50:01 +01:00
4b798c71ae Introduce new arguments and understand env vars 2019-11-19 17:50:01 +01:00
103 changed files with 21795 additions and 20781 deletions

.dockerignore (new file, 5 lines)

@@ -0,0 +1,5 @@
target
Dockerfile
.dockerignore
.git
.gitignore

.github/workflows/README.md (new file, 11 lines)

@@ -0,0 +1,11 @@
# GitHub actions workflow for MeiliDB

> **Note:**
> - We do not use [cache](https://github.com/actions/cache) yet but we could use it to speed up CI

## Workflow

- On each pull request, we are triggering `cargo test`.
- On each commit on master, we are building the latest docker image.
- On each tag, we are building the tagged docker image and the binaries for MacOS & Ubuntu.

.github/workflows/publish-binaries.yml (new file, 36 lines)

@@ -0,0 +1,36 @@
name: Publish binaries to GitHub release
on:
push:
tags:
- '*'
jobs:
publish:
name: Publish for ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
include:
- os: ubuntu-latest
artifact_name: meilisearch
asset_name: meilisearch-linux-amd64
- os: macos-latest
artifact_name: meilisearch
asset_name: meilisearch-macos-amd64
steps:
- uses: hecrj/setup-rust-action@master
with:
rust-version: stable
- uses: actions/checkout@v1
- name: Build
run: cargo build --release --locked
- name: Upload binaries to release
uses: svenstaro/upload-release-action@v1-release
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: target/release/${{ matrix.artifact_name }}
asset_name: ${{ matrix.asset_name }}
tag: ${{ github.ref }}

(new workflow file, 19 lines)

@@ -0,0 +1,19 @@
---
on:
push:
branches:
- master
name: Publish latest image to Docker Hub
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Publish to Registry
uses: elgohr/Publish-Docker-Github-Action@master
with:
name: getmeili/meilisearch
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}

(new workflow file, 20 lines)

@@ -0,0 +1,20 @@
---
on:
push:
tags:
- '*'
name: Publish tagged image to Docker Hub
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Publish to Registry
uses: elgohr/Publish-Docker-Github-Action@master
with:
name: getmeili/meilisearch
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
tag_names: true

.github/workflows/test.yml (new file, 21 lines)

@@ -0,0 +1,21 @@
---
on: [pull_request]
name: Cargo test
jobs:
check:
name: MeiliSearch
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Run cargo test
uses: actions-rs/cargo@v1
with:
command: test
args: --locked

Cargo.lock (generated, 387 changed lines)

@@ -38,6 +38,11 @@ dependencies = [
  "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "anyhow"
+version = "1.0.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "arc-swap"
 version = "0.4.3"

@@ -97,6 +102,19 @@ dependencies = [
  "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "base-x"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "base64"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "bincode"
 version = "1.2.0"

@@ -150,6 +168,11 @@ dependencies = [
  "safemem 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "bumpalo"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "byteorder"
 version = "1.3.2"

@@ -199,6 +222,11 @@ dependencies = [
  "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "chunked_transfer"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "clap"
 version = "2.33.0"

@@ -343,6 +371,11 @@ name = "deunicode"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "discard"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "doc-comment"
 version = "0.3.1"

@@ -370,24 +403,6 @@ dependencies = [
  "termcolor 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

-[[package]]
-name = "envconfig"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "envconfig_derive"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "error-chain"
 version = "0.12.1"

@@ -602,7 +617,7 @@ dependencies = [
 [[package]]
 name = "heed"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -779,6 +794,14 @@ dependencies = [
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "js-sys"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "wasm-bindgen 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "kernel32-sys"
 version = "0.2.2"

@@ -856,8 +879,8 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
-name = "meilidb-core"
-version = "0.6.5"
+name = "meilisearch-core"
+version = "0.8.2"
 dependencies = [
  "arc-swap 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -870,13 +893,13 @@ dependencies = [
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "fst 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "hashbrown 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "heed 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "heed 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "levenshtein_automata 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "meilidb-schema 0.6.0",
- "meilidb-tokenizer 0.6.1",
- "meilidb-types 0.1.0",
+ "meilisearch-schema 0.8.2",
+ "meilisearch-tokenizer 0.8.2",
+ "meilisearch-types 0.8.2",
  "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -893,42 +916,44 @@ dependencies = [
 ]

 [[package]]
-name = "meilidb-http"
-version = "0.1.1"
+name = "meilisearch-http"
+version = "0.8.2"
 dependencies = [
  "async-compression 0.1.0-alpha.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "envconfig 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "envconfig_derive 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "heed 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "heed 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)",
  "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "main_error 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "meilidb-core 0.6.5",
- "meilidb-schema 0.6.0",
+ "meilisearch-core 0.8.2",
+ "meilisearch-schema 0.8.2",
  "pretty-bytes 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_qs 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "siphasher 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "structopt 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "sysinfo 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "tide 0.2.0 (git+https://github.com/rustasync/tide?rev=e77709370bb24cf776fe6da902467c35131535b1)",
  "tide-compression 0.1.0 (git+https://github.com/rustasync/tide?rev=e77709370bb24cf776fe6da902467c35131535b1)",
  "tide-log 0.1.0 (git+https://github.com/rustasync/tide?rev=e77709370bb24cf776fe6da902467c35131535b1)",
  "tide-slog 0.1.0 (git+https://github.com/rustasync/tide?rev=e77709370bb24cf776fe6da902467c35131535b1)",
+ "ureq 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "vergen 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
  "walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "whoami 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
-name = "meilidb-schema"
-version = "0.6.0"
+name = "meilisearch-schema"
+version = "0.8.2"
 dependencies = [
  "bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "indexmap 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -938,16 +963,16 @@ dependencies = [
 ]

 [[package]]
-name = "meilidb-tokenizer"
-version = "0.6.1"
+name = "meilisearch-tokenizer"
+version = "0.8.2"
 dependencies = [
  "deunicode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "slice-group-by 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
-name = "meilidb-types"
-version = "0.1.0"
+name = "meilisearch-types"
+version = "0.8.2"
 dependencies = [
  "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
  "zerocopy 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1063,6 +1088,15 @@ dependencies = [
  "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "nom"
+version = "4.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "num-integer"
 version = "0.1.41"

@@ -1260,6 +1294,14 @@ dependencies = [
  "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "qstring"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "quick-error"
 version = "1.2.2"

@@ -1481,6 +1523,20 @@ dependencies = [
  "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "ring"
+version = "0.16.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.65 (registry+https://github.com/rust-lang/crates.io-index)",
+ "spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "web-sys 0.3.32 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "route-recognizer"
 version = "0.1.13"

@@ -1499,6 +1555,18 @@ dependencies = [
  "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "rustls"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ring 0.16.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki 0.21.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "rustyline"
 version = "5.0.4"

@@ -1537,6 +1605,15 @@ name = "scopeguard"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "sct"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ring 0.16.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "sdset"
 version = "0.3.3"

@@ -1586,7 +1663,7 @@ dependencies = [
 [[package]]
 name = "serde_qs"
-version = "0.5.0"
+version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "data-encoding 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1606,6 +1683,11 @@ dependencies = [
  "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "sha1"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "siphasher"
 version = "0.2.3"

@@ -1639,6 +1721,63 @@ dependencies = [
  "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "sourcefile"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "stdweb"
+version = "0.4.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "discard 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "stdweb-derive 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "stdweb-internal-macros 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "stdweb-internal-runtime 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "stdweb-derive"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "stdweb-internal-macros"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "base-x 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "stdweb-internal-runtime"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "string"
 version = "0.2.1"

@@ -1880,7 +2019,7 @@ dependencies = [
  "http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_qs 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_qs 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "tide-core 0.2.0 (git+https://github.com/rustasync/tide?rev=e77709370bb24cf776fe6da902467c35131535b1)",
 ]

@@ -2093,6 +2232,27 @@ name = "unicode-xid"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "untrusted"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "ureq"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "chunked_transfer 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cookie 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "qstring 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustls 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki 0.21.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "webpki-roots 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "url"
 version = "1.7.2"

@@ -2168,6 +2328,115 @@ name = "wasi"
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "wasm-bindgen"
+version = "0.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-macro 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-shared 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-macro-support 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-backend 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-shared 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "wasm-bindgen-webidl"
+version = "0.2.55"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "anyhow 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
+ "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-backend 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+ "weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "anyhow 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
+ "js-sys 0.3.32 (registry+https://github.com/rust-lang/crates.io-index)",
+ "sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+ "wasm-bindgen-webidl 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "webpki"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "ring 0.16.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "webpki 0.21.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "weedle"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "whoami"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "stdweb 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "winapi"
 version = "0.2.8"

@@ -2275,6 +2544,7 @@ dependencies = [
 "checksum ahash 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "2f00e10d4814aa20900e7948174384f79f1317f24f0ba7494e735111653fc330"
 "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d"
 "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
+"checksum anyhow 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "9267dff192e68f3399525901e709a48c1d3982c9c072fa32f2127a0cb0babf14"
 "checksum arc-swap 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f1a1eca3195b729bbd64e292ef2f5fff6b1c28504fed762ce2b1013dde4d8e92"
 "checksum assert_matches 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7deb0a829ca7bcfaf5da70b073a8d128619259a7be8216a355e23f00763059e5"
 "checksum async-compression 0.1.0-alpha.7 (registry+https://github.com/rust-lang/crates.io-index)" = "111a881dbe8c614f7905f52bd305d7c57c3bcd47feb0aba9da976fee662b98af"

@@ -2282,18 +2552,22 @@ dependencies = [
 "checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
 "checksum backtrace 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)" = "924c76597f0d9ca25d762c25a4d369d51267536465dc5064bdf0eb073ed477ea"
 "checksum backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491"
+"checksum base-x 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1b20b618342cf9891c292c4f5ac2cde7287cc5c87e87e9c769d617793607dec1"
+"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
 "checksum bincode 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ab639324e3ee8774d296864fbc0dbbb256cf1a41c490b94cba90c082915f92"
 "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
 "checksum brotli-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4445dea95f4c2b41cde57cc9fee236ae4dbae88d8fcbdb4750fc1bb5d86aaecd"
 "checksum brotli2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0cb036c3eade309815c15ddbacec5b22c4d1f3983a774ab2eac2e3e9ea85568e"
 "checksum bstr 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8d6c2c5b58ab920a4f5aeaaca34b4488074e8cc7596af94e6f8c6ff247c60245"
 "checksum buf_redux 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f"
+"checksum bumpalo 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad807f2fc2bf185eeb98ff3a901bd46dc5ad58163d0fa4577ba0d25674d71708"
 "checksum byteorder 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5"
 "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c"
 "checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
 "checksum cc 1.0.47 (registry+https://github.com/rust-lang/crates.io-index)" = "aa87058dce70a3ff5621797f1506cb837edd02ac4c0ae642b4542dce802908b8"
 "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
 "checksum chrono 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e8493056968583b0193c1bb04d6f7684586f3726992d6c573261941a895dbd68"
+"checksum chunked_transfer 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f98beb6554de08a14bd7b5c6014963c79d6a25a1c66b1d4ecb9e733ccba51d6c"
 "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9"
 "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
 "checksum const-random 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7b641a8c9867e341f3295564203b1c250eb8ce6cb6126e007941f78c4d2ed7fe"

@@ -2310,12 +2584,11 @@ dependencies = [
 "checksum csv-core 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9b5cadb6b25c77aeff80ba701712494213f4a8418fcda2ee11b6560c3ad0bf4c"
 "checksum data-encoding 2.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f4f47ca1860a761136924ddd2422ba77b2ea54fe8cc75b9040804a0d9d32ad97"
 "checksum deunicode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a0f5bbdedde60605d0719b998e282af68e2b1c50203110211fe4abe857560"
+"checksum discard 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0"
 "checksum doc-comment 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "923dea538cea0aa3025e8685b20d6ee21ef99c4f77e954a30febbaac5ec73a97"
 "checksum dtoa 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ea57b42383d091c85abcc2706240b94ab2a8fa1fc81c10ff23c4de06e2a90b5e"
 "checksum either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3"
 "checksum env_logger 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36"
-"checksum envconfig 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e84293f9f371139d8bb0913da796c565108c36109539e74f73c0d0b2fa117845"
-"checksum envconfig_derive 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "38f6cf35a56246cdf91ef102367259e77b154a023d885aa518718266c4886228"
 "checksum error-chain 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ab49e9dcb602294bc42f9a7dfc9bc6e936fca4418ea300dbfb84fe16de0b7d9"
 "checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9"
 "checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08"

@@ -2341,7 +2614,7 @@ dependencies = [
 "checksum h2 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462"
 "checksum hashbrown 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8e6073d0ca812575946eb5f35ff68dbe519907b25c42530389ff946dc84c6ead"
 "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
-"checksum heed 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b021df76de18f82f716fa6c858fd6bf39aec2c651852055563b5aba51debca81"
+"checksum heed 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df6c88807a125a2722484f62fa9c9615d85b0779a06467626db1279c32e287ba"
 "checksum hermit-abi 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "307c3c9f937f38e3534b1d6447ecf090cafcc9744e4a6360e8b037b2cf5af120"
 "checksum http 0.1.19 (registry+https://github.com/rust-lang/crates.io-index)" = "d7e06e336150b178206af098a055e3621e8336027e2b4d126bda0bc64824baaf"
 "checksum http-body 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d"

@@ -2358,6 +2631,7 @@ dependencies = [
 "checksum jemalloc-sys 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d3b9f3f5c9b31aa0f5ed3260385ac205db665baa41d49bb8338008ae94ede45"
 "checksum jemallocator 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "43ae63fcfc45e99ab3d1b29a46782ad679e98436c3169d15a167a1108a724b69"
 "checksum jobserver 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "f2b1d42ef453b30b7387e113da1c83ab1605d90c5b4e0eb8e96d016ed3b8c160"
+"checksum js-sys 0.3.32 (registry+https://github.com/rust-lang/crates.io-index)" = "1c840fdb2167497b0bd0db43d6dfe61e91637fa72f9d061f8bd17ddc44ba6414"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 "checksum levenshtein_automata 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73a004f877f468548d8d0ac4977456a249d8fabbdb8416c36db163dfc8f2e8ca"

@@ -2379,6 +2653,7 @@ dependencies = [
 "checksum multipart 0.16.1 (registry+https://github.com/rust-lang/crates.io-index)" = "136eed74cadb9edd2651ffba732b19a450316b680e4f48d6c79e905799e19d01"
 "checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
 "checksum nix 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6c722bee1037d430d0f8e687bbdbf222f27cc6e4e68d5caf630857bb2b6dbdce"
+"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
 "checksum num-integer 0.1.41 (registry+https://github.com/rust-lang/crates.io-index)" = "b85e541ef8255f6cf42bbfe4ef361305c6c135d10919ecc26126c4e5ae94bc09"
 "checksum num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "6ba9a427cfca2be13aa6f6403b0b7e7368fe982bfa16fccc450ce74c46cd9b32"
 "checksum num_cpus 1.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "155394f924cdddf08149da25bfb932d226b4a593ca7468b08191ff6335941af5"

@@ -2403,6 +2678,7 @@ dependencies = [
 "checksum proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5"
 "checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
 "checksum proc-macro2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "9c9e470a8dc4aeae2dee2f335e8f533e2d4b347e1434e5671afc49b054592f27"
+"checksum qstring 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "267047d2df92990367cbbf6b686c363c1518eb98e225b5193c8b936e52ab565a"
 "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
 "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
 "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"

@@ -2428,28 +2704,38 @@ dependencies = [
 "checksum regex-automata 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "92b73c2a1770c255c240eaa4ee600df1704a38dc3feaa6e949e7fcd4f8dc09f9"
 "checksum regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716"
 "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e"
+"checksum ring 0.16.9 (registry+https://github.com/rust-lang/crates.io-index)" = "6747f8da1f2b1fabbee1aaa4eb8a11abf9adef0bf58a41cee45db5d59cecdfac"
 "checksum route-recognizer 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "ea509065eb0b3c446acdd0102f0d46567dc30902dc0be91d6552035d92b0f4f8"
 "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
 "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
+"checksum rustls 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b25a18b1bf7387f0145e7f8324e700805aade3842dd3db2e74e4cdeb4677c09e"
 "checksum rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e9d8eb9912bc492db051324d36f5cea56984fc2afeaa5c6fa84e0b0e3cde550f"
 "checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8"
 "checksum safemem 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
 "checksum same-file 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "585e8ddcedc187886a30fa705c47985c3fa88d06624095856b36ca0b82ff4421"
 "checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d"
"checksum sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c"
"checksum sdset 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b6d2447743d6c37b6d67af88d9c0f1fc92989e2d9745d9b2f3d305b906a90195" "checksum sdset 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b6d2447743d6c37b6d67af88d9c0f1fc92989e2d9745d9b2f3d305b906a90195"
"checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4b39bd9b0b087684013a792c59e3e07a46a01d2322518d8a1104641a0b1be0" "checksum serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4b39bd9b0b087684013a792c59e3e07a46a01d2322518d8a1104641a0b1be0"
"checksum serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "ca13fc1a832f793322228923fbb3aba9f3f44444898f835d31ad1b74fa0a2bf8" "checksum serde_derive 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)" = "ca13fc1a832f793322228923fbb3aba9f3f44444898f835d31ad1b74fa0a2bf8"
"checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2" "checksum serde_json 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "2f72eb2a68a7dc3f9a691bfda9305a1c017a6215e5a4545c258500d2099a37c2"
"checksum serde_qs 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b232c387059b62b17eb0487bf23de3ddf21b648ad2206fadc6ff3af9e2f3c07" "checksum serde_qs 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "36278a86e341c46a42b0413ac3aa781902af93f5f4b10af098c704f4b96d81d8"
"checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" "checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97"
"checksum sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d"
"checksum siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" "checksum siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac"
"checksum siphasher 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "83da420ee8d1a89e640d0948c646c1c088758d3a3c538f943bfa97bdac17929d" "checksum siphasher 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "83da420ee8d1a89e640d0948c646c1c088758d3a3c538f943bfa97bdac17929d"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
"checksum slice-group-by 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1f7474f0b646d228360ab62ed974744617bc869d959eac8403bfa3665931a7fb" "checksum slice-group-by 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1f7474f0b646d228360ab62ed974744617bc869d959eac8403bfa3665931a7fb"
"checksum slog 2.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1cc9c640a4adbfbcc11ffb95efe5aa7af7309e002adab54b185507dbf2377b99" "checksum slog 2.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1cc9c640a4adbfbcc11ffb95efe5aa7af7309e002adab54b185507dbf2377b99"
"checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6" "checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6"
"checksum sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3"
"checksum spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
"checksum stdweb 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5"
"checksum stdweb-derive 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef"
"checksum stdweb-internal-macros 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11"
"checksum stdweb-internal-runtime 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0"
"checksum string 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d24114bfcceb867ca7f71a0d3fe45d45619ec47a6fbfa98cb14e14250bfa5d6d" "checksum string 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d24114bfcceb867ca7f71a0d3fe45d45619ec47a6fbfa98cb14e14250bfa5d6d"
"checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" "checksum strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
"checksum structopt 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c167b61c7d4c126927f5346a4327ce20abf8a186b8041bbeb1ce49e5db49587b" "checksum structopt 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c167b61c7d4c126927f5346a4327ce20abf8a186b8041bbeb1ce49e5db49587b"
@ -2494,6 +2780,8 @@ dependencies = [
"checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20" "checksum unicode-width 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7007dbd421b92cc6e28410fe7362e2e0a2503394908f417b68ec8d1c364c4e20"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c"
"checksum untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60369ef7a31de49bcb3f6ca728d4ba7300d9a1658f94c727d4cab8c8d9f4aece"
"checksum ureq 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "29bd1aba30a6937339e66e4ae3a651a72dda3df66a2584482298d4aa8cd84332"
"checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" "checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a"
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
"checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" "checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d"
@ -2504,6 +2792,17 @@ dependencies = [
"checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e" "checksum walkdir 2.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9658c94fa8b940eab2250bd5a457f9c48b748420d71293b165c8cdbe2f55f71e"
"checksum want 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" "checksum want 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230"
"checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d" "checksum wasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b89c3ce4ce14bdc6fb6beaf9ec7928ca331de5df7e5ea278375642a2f478570d"
"checksum wasm-bindgen 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)" = "29ae32af33bacd663a9a28241abecf01f2be64e6a185c6139b04f18b6385c5f2"
"checksum wasm-bindgen-backend 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)" = "1845584bd3593442dc0de6e6d9f84454a59a057722f36f005e44665d6ab19d85"
"checksum wasm-bindgen-macro 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)" = "87fcc747e6b73c93d22c947a6334644d22cfec5abd8b66238484dc2b0aeb9fe4"
"checksum wasm-bindgen-macro-support 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)" = "3dc4b3f2c4078c8c4a5f363b92fcf62604c5913cbd16c6ff5aaf0f74ec03f570"
"checksum wasm-bindgen-shared 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)" = "ca0b78d6d3be8589b95d1d49cdc0794728ca734adf36d7c9f07e6459508bb53d"
"checksum wasm-bindgen-webidl 0.2.55 (registry+https://github.com/rust-lang/crates.io-index)" = "3126356474ceb717c8fb5549ae387c9fbf4872818454f4d87708bee997214bb5"
"checksum web-sys 0.3.32 (registry+https://github.com/rust-lang/crates.io-index)" = "98405c0a2e722ed3db341b4c5b70eb9fe0021621f7350bab76df93b09b649bbf"
"checksum webpki 0.21.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d7e664e770ac0110e2384769bcc59ed19e329d81f555916a6e072714957b81b4"
"checksum webpki-roots 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)" = "91cd5736df7f12a964a5067a12c62fa38e1bd8080aff1f80bc29be7c80d19ab4"
"checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164"
"checksum whoami 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c47b0df2e31b0f1a0e4fda3adf4f6f36bbb8655cdd0b6313afbdd7a6f809fcab"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"


@ -1,10 +1,10 @@
[workspace]
members = [
"meilidb-core", "meilisearch-core",
"meilidb-http", "meilisearch-http",
"meilidb-schema", "meilisearch-schema",
"meilidb-tokenizer", "meilisearch-tokenizer",
"meilidb-types", "meilisearch-types",
]
[profile.release]

Dockerfile

@ -0,0 +1,27 @@
# Compile
FROM alpine:3.10 AS compiler
RUN apk update --quiet
RUN apk add curl
RUN apk add build-base
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
WORKDIR /meilisearch
COPY . .
ENV RUSTFLAGS="-C target-feature=-crt-static"
RUN $HOME/.cargo/bin/cargo build --release
# Run
FROM alpine:3.10
RUN apk update --quiet
RUN apk add libgcc
COPY --from=compiler /meilisearch/target/release/meilisearch .
ENV MEILI_HTTP_ADDR 0.0.0.0:8080
CMD ./meilisearch
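As a usage sketch (the `meilisearch` image tag here is just an example, not something this Dockerfile prescribes), you would typically build and run it with `docker build -t meilisearch .` followed by `docker run -p 8080:8080 meilisearch`, publishing the port set by `MEILI_HTTP_ADDR` above.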


@ -1,6 +1,6 @@
MIT License
Copyright (c) [year] [fullname] Copyright (c) 2019-2020 Meili SAS
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

README.md

@ -1,53 +1,56 @@
# MeiliDB # MeiliSearch
[![Build Status](https://dev.azure.com/thomas0884/thomas/_apis/build/status/meilisearch.MeiliDB?branchName=master)](https://dev.azure.com/thomas0884/thomas/_build/latest?definitionId=1&branchName=master) [![Build Status](https://github.com/meilisearch/MeiliSearch/workflows/Cargo%20test/badge.svg)](https://github.com/meilisearch/MeiliSearch/actions)
[![dependency status](https://deps.rs/repo/github/meilisearch/MeiliDB/status.svg)](https://deps.rs/repo/github/meilisearch/MeiliDB) [![dependency status](https://deps.rs/repo/github/meilisearch/MeiliSearch/status.svg)](https://deps.rs/repo/github/meilisearch/MeiliSearch)
[![License](https://img.shields.io/badge/license-commons%20clause-lightgrey)](https://commonsclause.com/) [![License](https://img.shields.io/badge/license-MIT-informational)](https://github.com/meilisearch/MeiliSearch/blob/master/LICENSE)
Ultra relevant and instant full-text search API. Ultra relevant and instant full-text search API 🔍
MeiliSearch is a powerful, fast, open-source, easy to use and deploy search engine. The search and indexation are fully customizable and handle features like typo-tolerance, filters, and ranking. MeiliSearch is a powerful, fast, open-source, easy to use and deploy search engine. The search and indexation are fully customizable and handle features like typo-tolerance, filters, and synonyms.
## Features ## What MeiliSearch has to offer
* Search as-you-type experience (answers < 50ms)
* Full-text search
* Typo tolerant (understands typos and spelling mistakes)
* Supports Kanji
* Supports synonyms
* Easy to install, deploy, and maintain
* Whole documents returned
* Highly customizable
* RESTful API
- Provides [6 default ranking criteria](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/criterion/mod.rs#L107-L113) used to [bucket sort](https://en.wikipedia.org/wiki/Bucket_sort) documents
For more [details about those features, go to our documentation](https://docs.meilisearch.com/introduction/features.html).
- Accepts [custom criteria](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/criterion/mod.rs#L24-L33) and can apply them in any custom order
- Support [ranged queries](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/query_builder.rs#L283), useful for paginating results
- Can [distinct](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/query_builder.rs#L265-L270) and [filter](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/query_builder.rs#L246-L259) returned documents based on context defined rules
- Searches for [concatenated](https://github.com/meilisearch/MeiliDB/pull/164) and [splitted query words](https://github.com/meilisearch/MeiliDB/pull/232) to improve the search quality.
- Can store complete documents or only [user schema specified fields](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-schema/src/lib.rs#L265-L279)
- The [default tokenizer](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-tokenizer/src/lib.rs) can index latin and kanji based languages
- Returns [the matching text areas](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/lib.rs#L66-L88), useful to highlight matched words in results
- Accepts query time search config like the [searchable attributes](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/query_builder.rs#L272-L275)
- Supports [runtime incremental indexing](https://github.com/meilisearch/MeiliDB/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilidb-core/src/store/mod.rs#L143-L173)
It uses [LMDB](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database) as the internal key-value store. The key-value store allows us to handle updates and queries with small memory and CPU overheads. The whole ranking system is [data oriented](https://github.com/meilisearch/MeiliDB/issues/82) and provides great performances.
You can [read the deep dive](deep-dive.md) if you want more information on the engine, it describes the whole process of generating updates and handling queries or you can take a look at the [typos and ranking rules](typos-ranking-rules.md) if you want to know the default rules used to sort the documents.
We will be proud if you submit issues and pull requests. You can help to grow this project and start contributing by checking [issues tagged "good-first-issue"](https://github.com/meilisearch/MeiliDB/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22). It is a good start!
[![crates.io demo gif](misc/crates-io-demo.gif)](https://crates.meilisearch.com)
> Meili helps the Rust community find crates on [crates.meilisearch.com](https://crates.meilisearch.com)
### In-depth features
- Provides [6 default ranking criteria](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/criterion/mod.rs#L107-L113) used to [bucket sort](https://en.wikipedia.org/wiki/Bucket_sort) documents
- Accepts [custom criteria](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/criterion/mod.rs#L24-L33) and can apply them in any custom order
- Supports [ranged queries](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/query_builder.rs#L283), useful for paginating results
- Can [distinct](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/query_builder.rs#L265-L270) and [filter](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/query_builder.rs#L246-L259) returned documents based on context defined rules
- Searches for [concatenated](https://github.com/meilisearch/MeiliSearch/pull/164) and [split query words](https://github.com/meilisearch/MeiliSearch/pull/232) to improve the search quality.
- Can store complete documents or only [user schema specified fields](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-schema/src/lib.rs#L265-L279)
- The [default tokenizer](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-tokenizer/src/lib.rs) can index latin and kanji based languages
- Returns [the matching text areas](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/lib.rs#L66-L88), useful to highlight matched words in results
- Accepts query time search config like the [searchable attributes](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/query_builder.rs#L272-L275)
- Supports [runtime incremental indexing](https://github.com/meilisearch/MeiliSearch/blob/dc5c42821e1340e96cb90a3da472264624a26326/meilisearch-core/src/store/mod.rs#L143-L173)
## Quick Start
You can deploy your own instant, relevant and typo-tolerant MeiliDB search engine by yourself too. You can deploy your instant, relevant, and typo-tolerant MeiliSearch search engine by yourself too.
Something similar to the demo above can be achieve by following these little three steps first. Something similar to the demo above can be achieved by following these three little steps.
You will need to create your own web front display to make it pretty though. You still need to create your front-end to make it pretty, though.
### Deploy the Server
If you have not installed Rust and its package manager `cargo` yet, go to [the installation page](https://www.rust-lang.org/tools/install).<br/> If you have not yet installed Rust and its package manager `cargo`, go to [the installation page](https://www.rust-lang.org/tools/install).<br/>
You can deploy the server on your own machine, it will listen to HTTP requests on the 8080 port by default. You can deploy the server on your machine; it listens to HTTP requests on port 8080 by default.
```bash
rustup override set nightly
cargo run --release
```
@ -58,15 +61,15 @@ RUST_LOG=info cargo run --release
### Create an Index and Upload Some Documents
MeiliDB can serve multiple indexes, with different kinds of documents, MeiliSearch can serve multiple indexes, with different kinds of documents,
therefore, it is required to create the index before sending documents to it.
```bash
curl -i -X POST 'http://127.0.0.1:8080/indexes/movies' curl -i -X POST 'http://127.0.0.1:8080/indexes' --data '{ "name": "Movies", "uid": "movies" }'
```
Now that the server knows about our brand new index, we can send it data.
We provided you a little dataset, it is available in the `datasets/` directory. We provided you a small dataset that is available in the `datasets/` directory.
```bash
curl -i -X POST 'http://127.0.0.1:8080/indexes/movies/documents' \
@ -77,7 +80,7 @@ curl -i -X POST 'http://127.0.0.1:8080/indexes/movies/documents' \
### Search for Documents
The search engine is now aware of our documents and can serve those via our HTTP server again.
The [`jq` command line tool](https://stedolan.github.io/jq/) can greatly help you read the server responses. The [`jq` command-line tool](https://stedolan.github.io/jq/) can significantly help you read the server responses.
```bash
curl 'http://127.0.0.1:8080/indexes/movies/search?q=botman'
@ -108,14 +111,12 @@ curl 'http://127.0.0.1:8080/indexes/movies/search?q=botman'
}
```
## Performances
With a dataset composed of _100 353_ documents with _352_ attributes each and _3_ of them indexed.
So, with more than _300 000_ fields indexed out of _35 million_ stored, we can handle more than _2.8k req/sec_ with an average response time of _9 ms_ on an Intel i7-7700 (8) @ 4.2GHz.
Requests are made using [wrk](https://github.com/wg/wrk) and scripted to simulate real users queries. Requests are made using [wrk](https://github.com/wg/wrk) and scripted to simulate real users' queries.
```
Running 10s test @ http://localhost:2230
@ -128,7 +129,7 @@ Requests/sec: 2806.46
Transfer/sec: 759.17KB
```
We also indexed a dataset containing something like _12 million_ city names in _24 minutes_ on a machine with _8 cores_, _64 GB of RAM_ and a _300 GB NVMe_ SSD.<br/> We also indexed a dataset containing something like _12 million_ city names in _24 minutes_ on a machine with _8 cores_, _64 GB of RAM_, and a _300 GB NVMe_ SSD.<br/>
The resulting database was _16 GB_ and search results were between _30 ms_ and _4 seconds_ for short prefix queries.
### Notes
@ -136,25 +137,18 @@ The resulting database was _16 GB_ and search results were between _30 ms_ and _
With Rust 1.32, the allocator has been [changed to use the system allocator](https://blog.rust-lang.org/2019/01/17/Rust-1.32.0.html#jemalloc-is-removed-by-default).
We have seen much better performance when [using jemalloc as the global allocator](https://github.com/alexcrichton/jemallocator#documentation).
## Usage and Examples

MeiliDB also provides an example binary that is mostly used for feature testing.
Notice that the example binary is faster at indexing data, as it reads CSV files directly rather than JSON HTTP payloads.

The _index_ subcommand has been made to create an index and inject documents into it. Using the command line below, the index will be named _movies_ and the _19 700_ movies of the `datasets/` will be injected in MeiliDB.

```bash
cargo run --release --example from_file -- \
  index example.mdb datasets/movies/movies.csv \
  --schema datasets/movies/schema.toml
```

Once the first command is done, you can query the freshly created _movies_ index using the _search_ subcommand. In this example we filtered the dataset to only show _non-adult_ movies using the non-definitive `!adult` syntax filter.

```bash
cargo run --release --example from_file -- \
  search example.mdb \
  --number-results 4 \
  --filter '!adult' \
  id popularity adult original_title
```

## How it works

MeiliSearch uses [LMDB](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database) as the internal key-value store. The key-value store allows us to handle updates and queries with small memory and CPU overheads. The whole ranking system is [data oriented](https://github.com/meilisearch/MeiliSearch/issues/82) and provides excellent performance.

You can [read the deep dive](deep-dive.md) if you want more information on the engine; it describes the whole process of generating updates and handling queries. Also, you can take a look at the [typos and ranking rules](typos-ranking-rules.md) if you want to know the default rules used to sort the documents.

## Contributing

We will be glad if you submit issues and pull requests. You can help to grow this project and start contributing by checking [issues tagged "good-first-issue"](https://github.com/meilisearch/MeiliSearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22). It is a good start!

### Analytic Events

We send events to our Amplitude instance to be aware of the number of people who use MeiliSearch.<br/>
We only send the platform on which the server runs, once a day. No other information is sent.<br/>
If you do not want us to send events, you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.
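For instance (assuming, as the sentence above implies, that merely defining the variable is enough; the expected value is not specified here), you could start the server with `MEILI_NO_ANALYTICS=true ./meilisearch`.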


@ -1,52 +0,0 @@
---
trigger:
branches:
include: [ master ]
pr: [ master ]
jobs:
- job: test
pool:
vmImage: 'Ubuntu 16.04'
container: tpayet/chiquitita:latest
steps:
- script: |
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly
$HOME/.cargo/bin/rustup component add rustfmt
displayName: 'Install rustc and components'
- script: |
$HOME/.cargo/bin/cargo check
displayName: 'Check MeiliDB'
- script: |
$HOME/.cargo/bin/cargo test
displayName: 'Test MeiliDB'
- script: |
$HOME/.cargo/bin/cargo fmt --all -- --check
displayName: 'Fmt MeiliDB'
- job: build
dependsOn:
- test
condition: succeeded()
pool:
vmImage: 'Ubuntu 16.04'
container: tpayet/chiquitita:latest
steps:
- script: |
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly
$HOME/.cargo/bin/rustup component add rustfmt
displayName: 'Install rustc and components'
- script: |
$HOME/.cargo/bin/cargo build --release
displayName: 'Build MeiliDB'
- task: CopyFiles@2
inputs:
contents: '$(System.DefaultWorkingDirectory)/target/release/meilidb-http'
targetFolder: $(Build.ArtifactStagingDirectory)
displayName: 'Copy build'
- task: PublishBuildArtifacts@1
inputs:
artifactName: meilidb
displayName: 'Upload artifacts'


@ -1 +1 @@
_datas in movies.csv are from https://www.themoviedb.org/_

File diff suppressed because it is too large


@ -1,8 +1,8 @@
# A deep dive in MeiliDB # A deep dive in MeiliSearch
On the 15th of May 2019.
MeiliDB is a full text search engine based on a finite state transducer named [fst](https://github.com/BurntSushi/fst) and a key-value store named [sled](https://github.com/spacejam/sled). The goal of a search engine is to store data and to respond to queries as accurately and quickly as possible. To achieve this, it must save the matching words in an [inverted index](https://en.wikipedia.org/wiki/Inverted_index). MeiliSearch is a full text search engine based on a finite state transducer named [fst](https://github.com/BurntSushi/fst) and a key-value store named [sled](https://github.com/spacejam/sled). The goal of a search engine is to store data and to respond to queries as accurately and quickly as possible. To achieve this, it must save the matching words in an [inverted index](https://en.wikipedia.org/wiki/Inverted_index).
<!-- MarkdownTOC autolink="true" -->
@ -22,7 +22,7 @@ MeiliDB is a full text search engine based on a final state transducer named [fs
## Where is the data stored?
MeiliDB is entirely backed by a key-value store like any good database (e.g. Postgres, MySQL). This brings great flexibility in the way documents can be stored and updates are handled over time. MeiliSearch is entirely backed by a key-value store like any good database (e.g. Postgres, MySQL). This brings great flexibility in the way documents can be stored and updates are handled over time.
[sled will bring some](https://github.com/spacejam/sled/tree/434533332a3f485e6d2e467023be0a0b55d3a1af#plans) of the [A.C.I.D. properties](https://en.wikipedia.org/wiki/ACID_(computer_science)) to help us be sure the saved data is consistent.
@ -34,7 +34,7 @@ It contain the inverted word index, the schema and the documents fields.
### The inverted word index
[The inverted word index](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-data/src/database/words_index.rs) is a sled Tree dedicated to storing and giving access to all documents that contain a specific word. The information stored under the word is simply a big ordered array of where in the document the word has been found. In other words, a big list of [`DocIndex`](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-core/src/lib.rs#L35-L51). [The inverted word index](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-data/src/database/words_index.rs) is a sled Tree dedicated to storing and giving access to all documents that contain a specific word. The information stored under the word is simply a big ordered array of where in the document the word has been found. In other words, a big list of [`DocIndex`](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-core/src/lib.rs#L35-L51).
#### A finite state transducer
@ -42,27 +42,27 @@ _...also abbreviated fst_
This is the first entry point of the engine; you can read more about how it works in the beautiful blog post by @BurntSushi, [Index 1,600,000,000 Keys with Automata and Rust](https://blog.burntsushi.net/transducers/).
To make it short it is a powerful way to store all the words that are present in the indexed documents. You construct it by giving it all the words you want to index. When you want to search in it you can provide any automaton you want, in MeiliDB [a custom levenshtein automaton](https://github.com/tantivy-search/levenshtein-automata/) is used. To make it short it is a powerful way to store all the words that are present in the indexed documents. You construct it by giving it all the words you want to index. When you want to search in it you can provide any automaton you want, in MeiliSearch [a custom levenshtein automaton](https://github.com/tantivy-search/levenshtein-automata/) is used.
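As a concrete illustration, here is a minimal, self-contained sketch that is not MeiliDB's actual code; it only shows the two crates pinned in the lock file above (`fst` 0.3 and `levenshtein_automata` with its `fst_automaton` feature) working together, with an invented word list and query:

```rust
use fst::{IntoStreamer, Set};
use levenshtein_automata::LevenshteinAutomatonBuilder;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // An fst set must be built from lexicographically sorted keys.
    let words = Set::from_iter(vec!["batman", "botswana", "robin"])?;

    // Build a DFA accepting every word within Levenshtein distance 1 of
    // the (misspelled) query "botman"; transpositions count as one edit.
    let lev_builder = LevenshteinAutomatonBuilder::new(1, true);
    let dfa = lev_builder.build_dfa("botman");

    // Stream every stored word that the automaton accepts.
    let matches = words.search(dfa).into_stream().into_strs()?;
    assert_eq!(matches, vec!["batman"]);
    Ok(())
}
```

Raising the maximum distance makes the automaton accept sloppier queries; that trade-off is exactly what the "different settings" for the DFAs mentioned below tune.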
#### Document indexes
The `fst` will only return the words that match the search automaton, but the goal of the search engine is to retrieve all matches in all the documents when a query is made. You want it to return some sort of position in an attribute in a document: information about where the given word matched.
To make this possible, we retrieve all of the `DocIndex` entries corresponding to all the matching words in the fst; we use the [`WordsIndex`](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-data/src/database/words_index.rs#L11-L21) Tree to get the `DocIndexes` corresponding to the words. To make this possible, we retrieve all of the `DocIndex` entries corresponding to all the matching words in the fst; we use the [`WordsIndex`](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-data/src/database/words_index.rs#L11-L21) Tree to get the `DocIndexes` corresponding to the words.
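For intuition only, here is a toy version of that words index with a simplified stand-in for `DocIndex`; the real field layout is the one behind the links above, not this guess:

```rust
use std::collections::HashMap;

// Simplified stand-in for MeiliDB's `DocIndex`: one occurrence of a word,
// i.e. which document, which attribute, and where inside that attribute.
#[derive(Debug, Clone, Copy)]
struct DocIndex {
    document_id: u64,
    attribute: u16,
    word_index: u32,
}

fn main() {
    // The words index: each indexed word maps to the ordered list of all
    // its occurrences across every document.
    let mut words_index: HashMap<&str, Vec<DocIndex>> = HashMap::new();
    words_index.entry("batman").or_default().push(DocIndex {
        document_id: 7,
        attribute: 1,  // say, the `title` attribute
        word_index: 0, // first word of that attribute
    });

    // Once the fst has matched "batman", we fetch its occurrences here.
    let postings = &words_index["batman"];
    println!("{} occurrence(s): {:?}", postings.len(), postings);
}
```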
### The schema
The schema is a data structure that represents which document attributes should be stored and which should be indexed. It is stored under the [`MainIndex`](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-data/src/database/main_index.rs#L12) Tree and given to MeiliDB only at the creation of an index. The schema is a data structure that represents which document attributes should be stored and which should be indexed. It is stored under the [`MainIndex`](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-data/src/database/main_index.rs#L12) Tree and given to MeiliSearch only at the creation of an index.
Each document attribute is associated with a unique 16-bit number named [`SchemaAttr`](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-data/src/schema.rs#L186). Each document attribute is associated with a unique 16-bit number named [`SchemaAttr`](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-data/src/schema.rs#L186).
In the future, this schema type could be given along with updates; the database would then be able to handle a new schema and reindex itself according to the new one.
### Document attributes
When the engine handles a query, the result that the requester wants is a document; not only the [`Matches`](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-core/src/lib.rs#L62-L88) associated with it, but the fields of the original document must be returned too. When the engine handles a query, the result that the requester wants is a document; not only the [`Matches`](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-core/src/lib.rs#L62-L88) associated with it, but the fields of the original document must be returned too.
So MeiliDB again uses the power of the underlying key-value store and saves the document attributes marked as _STORE_ in the schema. The dedicated Tree for this information is the [`DocumentsIndex`](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-data/src/database/documents_index.rs#L11). So MeiliSearch again uses the power of the underlying key-value store and saves the document attributes marked as _STORE_ in the schema. The dedicated Tree for this information is the [`DocumentsIndex`](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-data/src/database/documents_index.rs#L11).
When a document field is saved in the key-value store, its value is binary encoded using [message pack](https://github.com/3Hren/msgpack-rust), so a document must be serializable using serde.
@ -70,26 +70,26 @@ When a document field is saved in the key-value store its value is binary encode
## How is a request processed?
Now that we have our inverted index, we are able to return results based on a query. In the MeiliDB universe, a query is a simple string containing words. Now that we have our inverted index, we are able to return results based on a query. In the MeiliSearch universe, a query is a simple string containing words.
### Query lexemes
The first step to be able to call the underlying structures is to split the query into words; for that we use a [custom tokenizer](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-tokenizer/src/lib.rs#L82-L84). Note that a tokenizer is specialized for a human language; this is the hard part. The first step to be able to call the underlying structures is to split the query into words; for that we use a [custom tokenizer](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-tokenizer/src/lib.rs#L82-L84). Note that a tokenizer is specialized for a human language; this is the hard part.
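For intuition, a deliberately naive sketch of that splitting step; the real tokenizer also tracks word positions and handles non-Latin scripts, which this toy does not:

```rust
// A deliberately naive tokenizer: lowercase, then split on anything that
// is not alphanumeric, dropping the empty fragments in between.
fn tokenize(query: &str) -> Vec<String> {
    query
        .to_lowercase()
        .split(|c: char| !c.is_alphanumeric())
        .filter(|word| !word.is_empty())
        .map(|word| word.to_string())
        .collect()
}

fn main() {
    assert_eq!(
        tokenize("The Dark-Knight rises!"),
        vec!["the", "dark", "knight", "rises"]
    );
}
```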
### Automatons and query index
So to query the fst we need an automaton; in MeiliDB we use a [levenshtein automaton](https://en.wikipedia.org/wiki/Levenshtein_automaton), and this automaton is constructed using a string and a maximum distance. Following [Algolia's blog post](https://blog.algolia.com/inside-the-algolia-engine-part-3-query-processing/#algolia%e2%80%99s-way-of-searching-for-alternatives) we [created the DFAs](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-core/src/automaton.rs#L59-L78) with different settings. So to query the fst we need an automaton; in MeiliSearch we use a [levenshtein automaton](https://en.wikipedia.org/wiki/Levenshtein_automaton), and this automaton is constructed using a string and a maximum distance. Following [Algolia's blog post](https://blog.algolia.com/inside-the-algolia-engine-part-3-query-processing/#algolia%e2%80%99s-way-of-searching-for-alternatives) we [created the DFAs](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-core/src/automaton.rs#L59-L78) with different settings.
Thanks to the power of the fst library [it is possible to union multiple automatons](https://docs.rs/fst/0.3.2/fst/map/struct.OpBuilder.html#method.union) on the same fst set. The `Stream` is able to return all the matching words. We use these words to find the whole list of `DocIndexes` associated.
With all this information it is possible [to reconstruct a list of all the `DocIndexes` associated](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-core/src/query_builder.rs#L103-L130) with the words queried. With all this information it is possible [to reconstruct a list of all the `DocIndexes` associated](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-core/src/query_builder.rs#L103-L130) with the words queried.
### Sort by criteria
Now that we are able to get a big list of [DocIndexes](https://github.com/Kerollmops/MeiliDB/blob/550dc1e99224e386516877450320f694947332d4/src/lib.rs#L21-L36), it is not enough to sort them by criteria; we need more information, like the levenshtein distance or the fact that a query word matches exactly the word stored in the fst. So [we stuff it a little bit](https://github.com/Kerollmops/MeiliDB/blob/550dc1e99224e386516877450320f694947332d4/src/rank/query_builder.rs#L86-L93) and aggregate all these [Matches](https://github.com/Kerollmops/MeiliDB/blob/550dc1e99224e386516877450320f694947332d4/src/lib.rs#L47-L74) for each document. This way it is easy to sort a simple vector of documents using a bunch of functions. Now that we are able to get a big list of [DocIndexes](https://github.com/Kerollmops/MeiliSearch/blob/550dc1e99224e386516877450320f694947332d4/src/lib.rs#L21-L36), it is not enough to sort them by criteria; we need more information, like the levenshtein distance or the fact that a query word matches exactly the word stored in the fst. So [we stuff it a little bit](https://github.com/Kerollmops/MeiliSearch/blob/550dc1e99224e386516877450320f694947332d4/src/rank/query_builder.rs#L86-L93) and aggregate all these [Matches](https://github.com/Kerollmops/MeiliSearch/blob/550dc1e99224e386516877450320f694947332d4/src/lib.rs#L47-L74) for each document. This way it is easy to sort a simple vector of documents using a bunch of functions.
With this big list of documents and associated matches [we are able to sort only the part of the slice that we want](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-core/src/query_builder.rs#L160-L188) using bucket sorting. [Each criterion](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-core/src/criterion/mod.rs#L95-L101) is evaluated on each subslice without copy, thanks to [GroupByMut](https://docs.rs/slice-group-by/0.2.4/slice_group_by/) which, I hope [will soon be merged](https://github.com/rust-lang/rfcs/pull/2477). With this big list of documents and associated matches [we are able to sort only the part of the slice that we want](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-core/src/query_builder.rs#L160-L188) using bucket sorting. [Each criterion](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-core/src/criterion/mod.rs#L95-L101) is evaluated on each subslice without copy, thanks to [GroupByMut](https://docs.rs/slice-group-by/0.2.4/slice_group_by/) which, I hope [will soon be merged](https://github.com/rust-lang/rfcs/pull/2477).
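Here is a simplified, std-only sketch of that bucket-sorting idea with two invented criteria; the real engine avoids the naive group scan below by using `GroupByMut` on subslices, so this is an illustration, not MeiliDB's actual code:

```rust
// Bucket sorting by successive criteria: the first criterion decides the
// coarse order, and the second is only evaluated inside the groups
// (buckets) that the first one left tied.
#[derive(Debug)]
struct Candidate {
    typos: u8,   // first criterion: fewer typos is better
    exact: bool, // second criterion: exact matches come first
}

fn bucket_sort(candidates: &mut [Candidate]) {
    candidates.sort_by_key(|c| c.typos);

    let mut start = 0;
    while start < candidates.len() {
        let typos = candidates[start].typos;
        // Length of the bucket of candidates tied on the first criterion.
        let len = candidates[start..]
            .iter()
            .take_while(|c| c.typos == typos)
            .count();
        candidates[start..start + len].sort_by_key(|c| std::cmp::Reverse(c.exact));
        start += len;
    }
}

fn main() {
    let mut docs = vec![
        Candidate { typos: 1, exact: false },
        Candidate { typos: 0, exact: false },
        Candidate { typos: 0, exact: true },
    ];
    bucket_sort(&mut docs);
    // The exact zero-typo match sorts first.
    println!("{:?}", docs);
}
```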
Note that it is possible to customize the criteria used by using the `QueryBuilder::with_criteria` constructor, this way you can implement some custom ranking based on the document attributes using the appropriate structure and the [`document` method](https://github.com/meilisearch/MeiliDB/blob/3db823de002243004612e36a19b4578d800dab97/meilidb-data/src/database/index.rs#L86). Note that it is possible to customize the criteria used by using the `QueryBuilder::with_criteria` constructor, this way you can implement some custom ranking based on the document attributes using the appropriate structure and the [`document` method](https://github.com/meilisearch/MeiliSearch/blob/3db823de002243004612e36a19b4578d800dab97/meilisearch-data/src/database/index.rs#L86).
At this point, MeiliDB's work is over 🎉 At this point, MeiliSearch's work is over 🎉


@ -1,125 +0,0 @@
use crate::RankedMap;
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
use heed::Result as ZResult;
use meilidb_schema::Schema;
use std::sync::Arc;
const CUSTOMS_KEY: &str = "customs-key";
const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents";
const RANKED_MAP_KEY: &str = "ranked-map";
const SCHEMA_KEY: &str = "schema";
const SYNONYMS_KEY: &str = "synonyms";
const STOP_WORDS_KEY: &str = "stop-words";
const WORDS_KEY: &str = "words";
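// The "main" poly-database stores per-index metadata under the well-known
// string keys above: the words fst, the schema, the ranked map, synonyms,
// stop words, the document counter, and user customs.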
#[derive(Copy, Clone)]
pub struct Main {
pub(crate) main: heed::PolyDatabase,
}
impl Main {
pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
self.main.clear(writer)
}
pub fn put_words_fst(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes();
self.main.put::<Str, ByteSlice>(writer, WORDS_KEY, bytes)
}
pub fn words_fst(self, reader: &heed::RoTxn) -> ZResult<Option<fst::Set>> {
match self.main.get::<Str, ByteSlice>(reader, WORDS_KEY)? {
Some(bytes) => {
let len = bytes.len();
let bytes = Arc::new(bytes.to_owned());
let fst = fst::raw::Fst::from_shared_bytes(bytes, 0, len).unwrap();
Ok(Some(fst::Set::from(fst)))
}
None => Ok(None),
}
}
pub fn put_schema(self, writer: &mut heed::RwTxn, schema: &Schema) -> ZResult<()> {
self.main
.put::<Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)
}
pub fn schema(self, reader: &heed::RoTxn) -> ZResult<Option<Schema>> {
self.main
.get::<Str, SerdeBincode<Schema>>(reader, SCHEMA_KEY)
}
pub fn put_ranked_map(self, writer: &mut heed::RwTxn, ranked_map: &RankedMap) -> ZResult<()> {
self.main
.put::<Str, SerdeBincode<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map)
}
pub fn ranked_map(self, reader: &heed::RoTxn) -> ZResult<Option<RankedMap>> {
self.main
.get::<Str, SerdeBincode<RankedMap>>(reader, RANKED_MAP_KEY)
}
pub fn put_synonyms_fst(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes();
self.main.put::<Str, ByteSlice>(writer, SYNONYMS_KEY, bytes)
}
pub fn synonyms_fst(self, reader: &heed::RoTxn) -> ZResult<Option<fst::Set>> {
match self.main.get::<Str, ByteSlice>(reader, SYNONYMS_KEY)? {
Some(bytes) => {
let len = bytes.len();
let bytes = Arc::new(bytes.to_owned());
let fst = fst::raw::Fst::from_shared_bytes(bytes, 0, len).unwrap();
Ok(Some(fst::Set::from(fst)))
}
None => Ok(None),
}
}
pub fn put_stop_words_fst(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes();
self.main
.put::<Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)
}
pub fn stop_words_fst(self, reader: &heed::RoTxn) -> ZResult<Option<fst::Set>> {
match self.main.get::<Str, ByteSlice>(reader, STOP_WORDS_KEY)? {
Some(bytes) => {
let len = bytes.len();
let bytes = Arc::new(bytes.to_owned());
let fst = fst::raw::Fst::from_shared_bytes(bytes, 0, len).unwrap();
Ok(Some(fst::Set::from(fst)))
}
None => Ok(None),
}
}
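// Read-modify-write: apply `f` to the current document counter and persist
// the result within the same write transaction.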
pub fn put_number_of_documents<F>(self, writer: &mut heed::RwTxn, f: F) -> ZResult<u64>
where
F: Fn(u64) -> u64,
{
let new = self.number_of_documents(writer).map(f)?;
self.main
.put::<Str, OwnedType<u64>>(writer, NUMBER_OF_DOCUMENTS_KEY, &new)?;
Ok(new)
}
pub fn number_of_documents(self, reader: &heed::RoTxn) -> ZResult<u64> {
match self
.main
.get::<Str, OwnedType<u64>>(reader, NUMBER_OF_DOCUMENTS_KEY)?
{
Some(value) => Ok(value),
None => Ok(0),
}
}
pub fn put_customs(self, writer: &mut heed::RwTxn, customs: &[u8]) -> ZResult<()> {
self.main
.put::<Str, ByteSlice>(writer, CUSTOMS_KEY, customs)
}
pub fn customs<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult<Option<&'txn [u8]>> {
self.main.get::<Str, ByteSlice>(reader, CUSTOMS_KEY)
}
}


@ -1,2 +0,0 @@
pub mod meilidb;
pub mod tide;


@ -1,56 +0,0 @@
use envconfig::Envconfig;
use structopt::StructOpt;
#[derive(Debug, Clone, StructOpt, Envconfig)]
struct Vars {
/// The destination where the database must be created.
#[structopt(long)]
#[envconfig(from = "MEILI_DATABASE_PATH")]
pub database_path: Option<String>,
/// The addr on which the http server will listen.
#[structopt(long)]
#[envconfig(from = "MEILI_HTTP_ADDR")]
pub http_addr: Option<String>,
#[structopt(long)]
#[envconfig(from = "MEILI_ADMIN_TOKEN")]
pub admin_token: Option<String>,
}
#[derive(Clone, Debug)]
pub struct Opt {
pub database_path: String,
pub http_addr: String,
pub admin_token: Option<String>,
}
impl Default for Opt {
fn default() -> Self {
Opt {
database_path: String::from("/tmp/meilidb"),
http_addr: String::from("127.0.0.1:8080"),
admin_token: None,
}
}
}
impl Opt {
pub fn new() -> Self {
let default = Self::default();
let args = Vars::from_args();
let env = Vars::init().unwrap();
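// Resolution order: an environment variable wins over a CLI flag,
// which wins over the built-in default value.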
Self {
database_path: env
.database_path
.or(args.database_path)
.unwrap_or(default.database_path),
http_addr: env
.http_addr
.or(args.http_addr)
.unwrap_or(default.http_addr),
admin_token: env.admin_token.or(args.admin_token).or(default.admin_token),
}
}
}


@ -1,195 +0,0 @@
use http::StatusCode;
use meilidb_core::ProcessedUpdateResult;
use meilidb_schema::Schema;
use serde_json::json;
use tide::response::IntoResponse;
use tide::{Context, Response};
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt;
use crate::models::schema::SchemaBody;
use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::Data;
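// HTTP handlers for the index routes: list indexes, read and update schemas,
// poll update statuses, and delete indexes. Every handler first checks the
// token's ACL before touching the database.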
pub async fn list_indexes(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let list = ctx
.state()
.db
.indexes_names()
.map_err(ResponseError::internal)?;
Ok(tide::response::json(list))
}
pub async fn get_index_schema(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let index = ctx.index()?;
let env = &ctx.state().db.env;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let schema = index
.main
.schema(&reader)
.map_err(ResponseError::create_index)?;
match schema {
Some(schema) => {
let schema = SchemaBody::from(schema);
Ok(tide::response::json(schema))
}
None => Ok(
tide::response::json(json!({ "message": "missing index schema" }))
.with_status(StatusCode::NOT_FOUND)
.into_response(),
),
}
}
pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let index_name = ctx.url_param("index")?;
let body = ctx.body_bytes().await.map_err(ResponseError::bad_request)?;
let schema: Option<Schema> = if body.is_empty() {
None
} else {
serde_json::from_slice::<SchemaBody>(&body)
.map_err(ResponseError::bad_request)
.map(|s| Some(s.into()))?
};
let db = &ctx.state().db;
let created_index = match db.create_index(&index_name) {
Ok(index) => index,
Err(e) => return Err(ResponseError::create_index(e)),
};
let env = &db.env;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
match schema {
Some(schema) => {
let update_id = created_index
.schema_update(&mut writer, schema.clone())
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body)
.with_status(StatusCode::CREATED)
.into_response())
}
None => Ok(Response::new(tide::Body::empty())
.with_status(StatusCode::NO_CONTENT)
.into_response()),
}
}
pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let index_name = ctx.url_param("index")?;
let schema = ctx
.body_json::<SchemaBody>()
.await
.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let env = &db.env;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
let index = db
.open_index(&index_name)
.ok_or(ResponseError::index_not_found(index_name))?;
let schema: meilidb_schema::Schema = schema.into();
let update_id = index
.schema_update(&mut writer, schema.clone())
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body)
.with_status(StatusCode::ACCEPTED)
.into_response())
}
pub async fn get_update_status(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let env = &ctx.state().db.env;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let update_id = ctx
.param::<u64>("update_id")
.map_err(|e| ResponseError::bad_parameter("update_id", e))?;
let index = ctx.index()?;
let status = index
.update_status(&reader, update_id)
.map_err(ResponseError::internal)?;
let response = match status {
Some(status) => tide::response::json(status)
.with_status(StatusCode::OK)
.into_response(),
None => tide::response::json(json!({ "message": "unknown update id" }))
.with_status(StatusCode::NOT_FOUND)
.into_response(),
};
Ok(response)
}
pub async fn get_all_updates_status(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let env = &ctx.state().db.env;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let index = ctx.index()?;
let all_status = index
.all_updates_status(&reader)
.map_err(ResponseError::internal)?;
let response = tide::response::json(all_status)
.with_status(StatusCode::OK)
.into_response();
Ok(response)
}
pub async fn delete_index(ctx: Context<Data>) -> SResult<StatusCode> {
ctx.is_allowed(IndexesWrite)?;
let index_name = ctx.url_param("index")?;
let found = ctx
.state()
.db
.delete_index(&index_name)
.map_err(ResponseError::internal)?;
if found {
Ok(StatusCode::NO_CONTENT)
} else {
Ok(StatusCode::NOT_FOUND)
}
}
pub fn index_update_callback(index_name: &str, data: &Data, _status: ProcessedUpdateResult) {
let env = &data.db.env;
let mut writer = env.write_txn().unwrap();
data.compute_stats(&mut writer, &index_name).unwrap();
data.set_last_update(&mut writer, &index_name).unwrap();
writer.commit().unwrap();
}

View File

@ -1,6 +1,6 @@
[package]
-name = "meilidb-core"
-version = "0.7.0"
+name = "meilisearch-core"
+version = "0.8.2"
authors = ["Kerollmops <clement@meilisearch.com>"]
edition = "2018"
@ -14,18 +14,18 @@ deunicode = "1.0.0"
env_logger = "0.7.0"
fst = { version = "0.3.5", default-features = false }
hashbrown = { version = "0.6.0", features = ["serde"] }
-heed = "0.5.0"
+heed = "0.6.0"
levenshtein_automata = { version = "0.1.1", features = ["fst_automaton"] }
log = "0.4.8"
-meilidb-schema = { path = "../meilidb-schema", version = "0.6.0" }
-meilidb-tokenizer = { path = "../meilidb-tokenizer", version = "0.6.0" }
-meilidb-types = { path = "../meilidb-types", version = "0.1.0" }
+meilisearch-schema = { path = "../meilisearch-schema", version = "0.8.2" }
+meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.8.2" }
+meilisearch-types = { path = "../meilisearch-types", version = "0.8.2" }
once_cell = "1.2.0"
ordered-float = { version = "1.0.2", features = ["serde"] }
sdset = "0.3.3"
serde = { version = "1.0.101", features = ["derive"] }
serde_json = "1.0.41"
-siphasher = "0.3.0"
+siphasher = "0.3.1"
slice-group-by = "0.2.6"
zerocopy = "0.2.8"

View File

@ -12,8 +12,8 @@ use serde::{Deserialize, Serialize};
use structopt::StructOpt;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
-use meilidb_core::{Database, Highlight, ProcessedUpdateResult};
-use meilidb_schema::SchemaAttr;
+use meilisearch_core::{Database, Highlight, ProcessedUpdateResult};
+use meilisearch_schema::SchemaAttr;
#[derive(Debug, StructOpt)]
struct IndexCommand {
@ -22,7 +22,7 @@ struct IndexCommand {
database_path: PathBuf,
#[structopt(long, default_value = "default")]
-index_name: String,
+index_uid: String,
/// The csv file to index.
#[structopt(parse(from_os_str))]
@ -46,7 +46,7 @@ struct SearchCommand {
database_path: PathBuf,
#[structopt(long, default_value = "default")]
-index_name: String,
+index_uid: String,
/// Timeout after which the search will return results.
#[structopt(long)]
@ -76,7 +76,7 @@ struct ShowUpdatesCommand {
database_path: PathBuf,
#[structopt(long, default_value = "default")]
-index_name: String,
+index_uid: String,
}
#[derive(Debug, StructOpt)]
@ -106,31 +106,32 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box<dy
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn =
move |_name: &str, update: ProcessedUpdateResult| sender.send(update.update_id).unwrap();
-let index = match database.open_index(&command.index_name) {
+let index = match database.open_index(&command.index_uid) {
Some(index) => index,
-None => database.create_index(&command.index_name).unwrap(),
+None => database.create_index(&command.index_uid).unwrap(),
};
database.set_update_callback(Box::new(update_fn));
-let env = &database.env;
+let db = &database;
let schema = {
let string = fs::read_to_string(&command.schema)?;
toml::from_str(&string).unwrap()
};
-let mut writer = env.write_txn().unwrap();
-match index.main.schema(&writer)? {
+let reader = db.main_read_txn().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+match index.main.schema(&reader)? {
Some(current_schema) => {
if current_schema != schema {
-return Err(meilidb_core::Error::SchemaDiffer.into());
+return Err(meilisearch_core::Error::SchemaDiffer.into());
}
-writer.abort();
+update_writer.abort();
}
None => {
-index.schema_update(&mut writer, schema)?;
-writer.commit().unwrap();
+index.schema_update(&mut update_writer, schema)?;
+update_writer.commit().unwrap();
}
}
@ -173,10 +174,10 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box<dy
println!();
-let mut writer = env.write_txn().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
println!("committing update...");
-let update_id = additions.finalize(&mut writer)?;
-writer.commit().unwrap();
+let update_id = additions.finalize(&mut update_writer)?;
+update_writer.commit().unwrap();
max_update_id = max_update_id.max(update_id);
println!("committed update {}", update_id);
}
@ -316,16 +317,16 @@
}
fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<dyn Error>> {
-let env = &database.env;
+let db = &database;
let index = database
-.open_index(&command.index_name)
+.open_index(&command.index_uid)
.expect("Could not find index");
-let reader = env.read_txn().unwrap();
+let reader = db.main_read_txn().unwrap();
let schema = index.main.schema(&reader)?;
reader.abort();
-let schema = schema.ok_or(meilidb_core::Error::SchemaMissing)?;
+let schema = schema.ok_or(meilisearch_core::Error::SchemaMissing)?;
let fields = command.displayed_fields.iter().map(String::as_str);
let fields = HashSet::from_iter(fields);
@ -339,7 +340,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<
Ok(query) => {
let start_total = Instant::now();
-let reader = env.read_txn().unwrap();
+let reader = db.main_read_txn().unwrap();
let ref_index = &index;
let ref_reader = &reader;
@ -444,12 +445,12 @@
command: ShowUpdatesCommand,
database: Database,
) -> Result<(), Box<dyn Error>> {
-let env = &database.env;
+let db = &database;
let index = database
-.open_index(&command.index_name)
+.open_index(&command.index_uid)
.expect("Could not find index");
-let reader = env.read_txn().unwrap();
+let reader = db.update_read_txn().unwrap();
let updates = index.all_updates_status(&reader)?;
println!("{:#?}", updates);
reader.abort();

View File

@ -6,8 +6,9 @@ use std::{cmp, vec};
use fst::{IntoStreamer, Streamer};
use levenshtein_automata::DFA;
-use meilidb_tokenizer::{is_cjk, split_query_string};
+use meilisearch_tokenizer::{is_cjk, split_query_string};
+use crate::database::MainT;
use crate::error::MResult;
use crate::store;
@ -23,7 +24,7 @@ pub struct AutomatonProducer {
impl AutomatonProducer {
pub fn new(
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
query: &str,
main_store: store::Main,
postings_list_store: store::PostingsLists,
@ -131,7 +132,7 @@ pub fn normalize_str(string: &str) -> String {
}
fn split_best_frequency<'a>(
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
word: &'a str,
postings_lists_store: store::PostingsLists,
) -> MResult<Option<(&'a str, &'a str)>> {
@ -159,7 +160,7 @@ fn split_best_frequency<'a>(
}
fn generate_automatons(
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
query: &str,
main_store: store::Main,
postings_lists_store: store::PostingsLists,

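The `heed::RoTxn` to `heed::RoTxn<MainT>` change above is the visible side of the typed transactions used to keep the two LMDB environments apart: a zero-sized marker type records which environment a transaction was opened on, so passing an update-environment transaction to a function that reads the main environment no longer type-checks. A self-contained sketch of the idea (these are stand-in types, not the real heed API):

use std::marker::PhantomData;

// Marker types, mirroring the MainT/UpdateT markers introduced in this changeset.
pub struct MainT;
pub struct UpdateT;

// A read transaction tagged with the environment it was opened on.
pub struct RoTxn<T> {
    _marker: PhantomData<T>,
}

fn open<T>() -> RoTxn<T> {
    RoTxn { _marker: PhantomData }
}

// This function can only ever receive a transaction on the main environment.
fn schema(_reader: &RoTxn<MainT>) {}

fn main() {
    let main_reader: RoTxn<MainT> = open();
    let update_reader: RoTxn<UpdateT> = open();
    schema(&main_reader);
    // schema(&update_reader); // compile error: expected RoTxn<MainT>, found RoTxn<UpdateT>
    let _ = update_reader;
}
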
View File

@ -1,6 +1,6 @@
use std::cmp::Ordering;
-use meilidb_schema::SchemaAttr;
+use meilisearch_schema::SchemaAttr;
use sdset::Set;
use slice_group_by::GroupBy;

View File

@ -4,7 +4,7 @@ use std::fmt;
use crate::criterion::Criterion;
use crate::{RankedMap, RawDocument};
-use meilidb_schema::{Schema, SchemaAttr};
+use meilisearch_schema::{Schema, SchemaAttr};
/// A helper struct that permits sorting documents by
/// some of their stored attributes.
@ -23,7 +23,7 @@ use meilidb_schema::{Schema, SchemaAttr};
///
/// ```ignore
/// use serde_derive::Deserialize;
-/// use meilidb::rank::criterion::*;
+/// use meilisearch::rank::criterion::*;
///
/// let custom_ranking = SortByAttr::lower_is_better(&ranked_map, &schema, "published_at")?;
///

View File

@ -14,11 +14,15 @@ use crate::{store, update, Index, MResult};
pub type BoxUpdateFn = Box<dyn Fn(&str, update::ProcessedUpdateResult) + Send + Sync + 'static>;
type ArcSwapFn = arc_swap::ArcSwapOption<BoxUpdateFn>;
+pub struct MainT;
+pub struct UpdateT;
pub struct Database {
-pub env: heed::Env,
+env: heed::Env,
+update_env: heed::Env,
common_store: heed::PolyDatabase,
indexes_store: heed::Database<Str, Unit>,
-indexes: RwLock<HashMap<String, (Index, thread::JoinHandle<()>)>>,
+indexes: RwLock<HashMap<String, (Index, thread::JoinHandle<MResult<()>>)>>,
update_fn: Arc<ArcSwapFn>,
}
@ -36,7 +40,7 @@ macro_rules! r#break_try {
pub enum UpdateEvent {
NewUpdate,
-MustStop,
+MustClear,
}
pub type UpdateEvents = Receiver<UpdateEvent>;
@ -45,68 +49,107 @@ pub type UpdateEventsEmitter = Sender<UpdateEvent>;
fn update_awaiter(
receiver: UpdateEvents,
env: heed::Env,
-index_name: &str,
+update_env: heed::Env,
+index_uid: &str,
update_fn: Arc<ArcSwapFn>,
index: Index,
-) {
+) -> MResult<()> {
let mut receiver = receiver.into_iter();
-while let Some(UpdateEvent::NewUpdate) = receiver.next() {
+while let Some(event) = receiver.next() {
+// if we receive a *MustClear* event, clear the index and break the loop
+if let UpdateEvent::MustClear = event {
+let mut writer = env.typed_write_txn::<MainT>()?;
+let mut update_writer = update_env.typed_write_txn::<UpdateT>()?;
+store::clear(&mut writer, &mut update_writer, &index)?;
+writer.commit()?;
+update_writer.commit()?;
+debug!("store {} cleared", index_uid);
+break
+}
loop {
-// instantiate a main/parent transaction
-let mut writer = break_try!(env.write_txn(), "LMDB write transaction begin failed");
+// We instantiate a *write* transaction to *block* the thread
+// until the *other*, notifying, thread commits
+let result = update_env.typed_write_txn::<UpdateT>();
+let update_reader = break_try!(result, "LMDB read transaction (update) begin failed");
// retrieve the update that needs to be processed
-let result = index.updates.pop_front(&mut writer);
+let result = index.updates.first_update(&update_reader);
let (update_id, update) = match break_try!(result, "pop front update failed") {
Some(value) => value,
None => {
debug!("no more updates");
-writer.abort();
break;
}
};
-// instantiate a nested transaction
-let result = env.nested_write_txn(&mut writer);
-let mut nested_writer = break_try!(result, "LMDB nested write transaction failed");
-// try to apply the update to the database using the nested transaction
-let result = update::update_task(&mut nested_writer, index.clone(), update_id, update);
+// do not keep the reader for too long
+update_reader.abort();
+// instantiate a transaction to touch the main env
+let result = env.typed_write_txn::<MainT>();
+let mut main_writer = break_try!(result, "LMDB nested write transaction failed");
+// try to apply the update to the database using the main transaction
+let result = update::update_task(&mut main_writer, &index, update_id, update);
let status = break_try!(result, "update task failed");
-// commit the nested transaction if the update was successful, abort it otherwise
+// commit the main transaction if the update was successful, abort it otherwise
if status.error.is_none() {
-break_try!(nested_writer.commit(), "commit nested transaction failed");
+break_try!(main_writer.commit(), "commit nested transaction failed");
} else {
-nested_writer.abort()
+main_writer.abort()
}
-// write the result of the update in the updates-results store
-let updates_results = index.updates_results;
-let result = updates_results.put_update_result(&mut writer, update_id, &status);
-// always commit the main/parent transaction, even if the update was unsuccessful
+// now that the update has been processed we can instantiate
+// a transaction to move the result to the updates-results store
+let result = update_env.typed_write_txn::<UpdateT>();
+let mut update_writer = break_try!(result, "LMDB write transaction begin failed");
+// definitely remove the update from the updates store
+index.updates.del_update(&mut update_writer, update_id)?;
+// write the result of the updates-results store
+let updates_results = index.updates_results;
+let result = updates_results.put_update_result(&mut update_writer, update_id, &status);
+// always commit the main transaction, even if the update was unsuccessful
break_try!(result, "update result store commit failed");
-break_try!(writer.commit(), "update parent transaction failed");
+break_try!(update_writer.commit(), "update transaction commit failed");
// call the user callback when the update and the result are written consistently
if let Some(ref callback) = *update_fn.load() {
-(callback)(index_name, status);
+(callback)(index_uid, status);
}
}
}
debug!("update loop system stopped");
+Ok(())
}
impl Database {
pub fn open_or_create(path: impl AsRef<Path>) -> MResult<Database> {
-fs::create_dir_all(path.as_ref())?;
+let main_path = path.as_ref().join("main");
+let update_path = path.as_ref().join("update");
+fs::create_dir_all(&main_path)?;
let env = heed::EnvOpenOptions::new()
.map_size(10 * 1024 * 1024 * 1024) // 10GB
.max_dbs(3000)
-.open(path)?;
+.open(main_path)?;
+fs::create_dir_all(&update_path)?;
+let update_env = heed::EnvOpenOptions::new()
+.map_size(10 * 1024 * 1024 * 1024) // 10GB
+.max_dbs(3000)
+.open(update_path)?;
let common_store = env.create_poly_database(Some("common"))?;
let indexes_store = env.create_database::<Str, Unit>(Some("indexes"))?;
@ -116,36 +159,38 @@ impl Database {
let mut must_open = Vec::new();
let reader = env.read_txn()?;
for result in indexes_store.iter(&reader)? {
-let (index_name, _) = result?;
-must_open.push(index_name.to_owned());
+let (index_uid, _) = result?;
+must_open.push(index_uid.to_owned());
}
reader.abort();
// open the previously aggregated indexes
let mut indexes = HashMap::new();
-for index_name in must_open {
+for index_uid in must_open {
let (sender, receiver) = crossbeam_channel::bounded(100);
-let index = match store::open(&env, &index_name, sender.clone())? {
+let index = match store::open(&env, &update_env, &index_uid, sender.clone())? {
Some(index) => index,
None => {
log::warn!(
"the index {} doesn't exist or has not all the databases",
-index_name
+index_uid
);
continue;
}
};
let env_clone = env.clone();
+let update_env_clone = update_env.clone();
let index_clone = index.clone();
-let name_clone = index_name.clone();
+let name_clone = index_uid.clone();
let update_fn_clone = update_fn.clone();
let handle = thread::spawn(move || {
update_awaiter(
receiver,
env_clone,
+update_env_clone,
&name_clone,
update_fn_clone,
index_clone,
@ -156,7 +201,7 @@ impl Database {
// possible pre-boot updates are consumed
sender.send(UpdateEvent::NewUpdate).unwrap();
-let result = indexes.insert(index_name, (index, handle));
+let result = indexes.insert(index_uid, (index, handle));
assert!(
result.is_none(),
"The index should not have been already open"
@ -165,6 +210,7 @@ impl Database {
Ok(Database {
env,
+update_env,
common_store,
indexes_store,
indexes: RwLock::new(indexes),
@ -188,12 +234,13 @@ impl Database {
Entry::Occupied(_) => Err(crate::Error::IndexAlreadyExists),
Entry::Vacant(entry) => {
let (sender, receiver) = crossbeam_channel::bounded(100);
-let index = store::create(&self.env, name, sender)?;
+let index = store::create(&self.env, &self.update_env, name, sender)?;
let mut writer = self.env.write_txn()?;
self.indexes_store.put(&mut writer, name, &())?;
let env_clone = self.env.clone();
+let update_env_clone = self.update_env.clone();
let index_clone = index.clone();
let name_clone = name.to_owned();
let update_fn_clone = self.update_fn.clone();
@ -202,6 +249,7 @@ impl Database {
update_awaiter(
receiver,
env_clone,
+update_env_clone,
&name_clone,
update_fn_clone,
index_clone,
@ -226,11 +274,15 @@ impl Database {
// and clear all the LMDB dbi
let mut writer = self.env.write_txn()?;
self.indexes_store.delete(&mut writer, &name)?;
-store::clear(&mut writer, &index)?;
writer.commit()?;
+// send a stop event to the update loop of the index
+index.updates_notifier.send(UpdateEvent::MustClear).unwrap();
+drop(indexes_lock);
// join the update loop thread to ensure it is stopped
-handle.join().unwrap();
+handle.join().unwrap()?;
Ok(true)
}
@ -247,13 +299,29 @@ impl Database {
self.update_fn.swap(None);
}
+pub fn main_read_txn(&self) -> heed::Result<heed::RoTxn<MainT>> {
+self.env.typed_read_txn::<MainT>()
+}
+pub fn main_write_txn(&self) -> heed::Result<heed::RwTxn<MainT>> {
+self.env.typed_write_txn::<MainT>()
+}
+pub fn update_read_txn(&self) -> heed::Result<heed::RoTxn<UpdateT>> {
+self.update_env.typed_read_txn::<UpdateT>()
+}
+pub fn update_write_txn(&self) -> heed::Result<heed::RwTxn<UpdateT>> {
+self.update_env.typed_write_txn::<UpdateT>()
+}
pub fn copy_and_compact_to_path<P: AsRef<Path>>(&self, path: P) -> ZResult<File> {
self.env.copy_to_path(path, CompactionOption::Enabled)
}
-pub fn indexes_names(&self) -> MResult<Vec<String>> {
+pub fn indexes_uids(&self) -> Vec<String> {
let indexes = self.indexes.read().unwrap();
-Ok(indexes.keys().cloned().collect())
+indexes.keys().cloned().collect()
}
pub fn common_store(&self) -> heed::PolyDatabase {
@ -265,8 +333,9 @@ impl Database {
mod tests {
use super::*;
+use crate::criterion::{self, CriteriaBuilder};
use crate::update::{ProcessedUpdateResult, UpdateStatus};
-use crate::DocumentId;
+use crate::{Document, DocumentId};
use serde::de::IgnoredAny;
use std::sync::mpsc;
@ -275,7 +344,7 @@ mod tests {
let dir = tempfile::tempdir().unwrap();
let database = Database::open_or_create(dir.path()).unwrap();
-let env = &database.env;
+let db = &database;
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
@ -300,9 +369,9 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
-let _update_id = index.schema_update(&mut writer, schema).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+update_writer.commit().unwrap();
let mut additions = index.documents_addition();
@ -321,15 +390,15 @@ mod tests {
additions.update_document(doc1);
additions.update_document(doc2);
-let mut writer = env.write_txn().unwrap();
-let update_id = additions.finalize(&mut writer).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let update_id = additions.finalize(&mut update_writer).unwrap();
+update_writer.commit().unwrap();
// block until the transaction is processed
let _ = receiver.into_iter().find(|id| *id == update_id);
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
}
@ -338,7 +407,7 @@ mod tests {
let dir = tempfile::tempdir().unwrap();
let database = Database::open_or_create(dir.path()).unwrap();
-let env = &database.env;
+let db = &database;
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
@ -363,9 +432,9 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
-let _update_id = index.schema_update(&mut writer, schema).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+update_writer.commit().unwrap();
let mut additions = index.documents_addition();
@ -383,15 +452,15 @@ mod tests {
additions.update_document(doc1);
additions.update_document(doc2);
-let mut writer = env.write_txn().unwrap();
-let update_id = additions.finalize(&mut writer).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let update_id = additions.finalize(&mut update_writer).unwrap();
+update_writer.commit().unwrap();
// block until the transaction is processed
let _ = receiver.into_iter().find(|id| *id == update_id);
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_some());
}
@ -400,7 +469,7 @@ mod tests {
let dir = tempfile::tempdir().unwrap();
let database = Database::open_or_create(dir.path()).unwrap();
-let env = &database.env;
+let db = &database;
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
@ -421,9 +490,9 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
-let _update_id = index.schema_update(&mut writer, schema).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+update_writer.commit().unwrap();
let mut additions = index.documents_addition();
@ -434,15 +503,15 @@ mod tests {
additions.update_document(doc1);
-let mut writer = env.write_txn().unwrap();
-let update_id = additions.finalize(&mut writer).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let update_id = additions.finalize(&mut update_writer).unwrap();
+update_writer.commit().unwrap();
// block until the transaction is processed
let _ = receiver.into_iter().find(|id| *id == update_id);
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
}
@ -451,7 +520,7 @@ mod tests {
let dir = tempfile::tempdir().unwrap();
let database = Database::open_or_create(dir.path()).unwrap();
-let env = &database.env;
+let db = &database;
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
@ -476,9 +545,9 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
-let _update_id = index.schema_update(&mut writer, schema).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+update_writer.commit().unwrap();
let mut additions = index.documents_addition();
@ -497,9 +566,9 @@ mod tests {
additions.update_document(doc1);
additions.update_document(doc2);
-let mut writer = env.write_txn().unwrap();
-let _update_id = additions.finalize(&mut writer).unwrap();
-writer.commit().unwrap();
+let mut update_writer = db.update_write_txn().unwrap();
+let _update_id = additions.finalize(&mut update_writer).unwrap();
+update_writer.commit().unwrap();
let schema = {
let data = r#"
@ -524,7 +593,7 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let update_id = index.schema_update(&mut writer, schema).unwrap();
writer.commit().unwrap();
@ -532,10 +601,10 @@ mod tests {
let _ = receiver.iter().find(|id| *id == update_id);
// check if it has been accepted
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
-reader.abort();
+update_reader.abort();
let mut additions = index.documents_addition();
@ -558,7 +627,7 @@ mod tests {
additions.update_document(doc1);
additions.update_document(doc2);
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let update_id = additions.finalize(&mut writer).unwrap();
writer.commit().unwrap();
@ -566,11 +635,13 @@ mod tests {
let _ = receiver.iter().find(|id| *id == update_id);
// check if it has been accepted
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
+update_reader.abort();
// even try to search for a document
+let reader = db.main_read_txn().unwrap();
let results = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
assert_matches!(results.len(), 1);
@ -604,7 +675,7 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let update_id = index.schema_update(&mut writer, schema).unwrap();
writer.commit().unwrap();
@ -612,8 +683,8 @@ mod tests {
let _ = receiver.iter().find(|id| *id == update_id);
// check if it has been accepted
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_some());
}
@ -622,7 +693,7 @@ mod tests {
let dir = tempfile::tempdir().unwrap();
let database = Database::open_or_create(dir.path()).unwrap();
-let env = &database.env;
+let db = &database;
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
@ -647,7 +718,7 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let _update_id = index.schema_update(&mut writer, schema).unwrap();
writer.commit().unwrap();
@ -670,17 +741,19 @@ mod tests {
additions.update_document(doc1);
additions.update_document(doc2);
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let update_id = additions.finalize(&mut writer).unwrap();
writer.commit().unwrap();
// block until the transaction is processed
let _ = receiver.into_iter().find(|id| *id == update_id);
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
+update_reader.abort();
+let reader = db.main_read_txn().unwrap();
let document: Option<IgnoredAny> = index.document(&reader, None, DocumentId(25)).unwrap();
assert!(document.is_none());
@ -700,7 +773,7 @@ mod tests {
let dir = tempfile::tempdir().unwrap();
let database = Database::open_or_create(dir.path()).unwrap();
-let env = &database.env;
+let db = &database;
let (sender, receiver) = mpsc::sync_channel(100);
let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
@ -728,7 +801,7 @@ mod tests {
toml::from_str(data).unwrap()
};
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let _update_id = index.schema_update(&mut writer, schema).unwrap();
writer.commit().unwrap();
@ -751,17 +824,19 @@ mod tests {
additions.update_document(doc1);
additions.update_document(doc2);
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let update_id = additions.finalize(&mut writer).unwrap();
writer.commit().unwrap();
// block until the transaction is processed
let _ = receiver.iter().find(|id| *id == update_id);
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
+update_reader.abort();
+let reader = db.main_read_txn().unwrap();
let document: Option<IgnoredAny> = index.document(&reader, None, DocumentId(25)).unwrap();
assert!(document.is_none());
@ -794,17 +869,19 @@ mod tests {
partial_additions.update_document(partial_doc1);
partial_additions.update_document(partial_doc2);
-let mut writer = env.write_txn().unwrap();
+let mut writer = db.update_write_txn().unwrap();
let update_id = partial_additions.finalize(&mut writer).unwrap();
writer.commit().unwrap();
// block until the transaction is processed
let _ = receiver.iter().find(|id| *id == update_id);
-let reader = env.read_txn().unwrap();
-let result = index.update_status(&reader, update_id).unwrap();
+let update_reader = db.update_read_txn().unwrap();
+let result = index.update_status(&update_reader, update_id).unwrap();
assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
+update_reader.abort();
+let reader = db.main_read_txn().unwrap();
let document: Option<serde_json::Value> = index
.document(&reader, None, DocumentId(7900334843754999545))
.unwrap();
@ -832,13 +909,166 @@ mod tests {
fn delete_index() {
let dir = tempfile::tempdir().unwrap();
-let database = Database::open_or_create(dir.path()).unwrap();
-let _index = database.create_index("test").unwrap();
+let database = Arc::new(Database::open_or_create(dir.path()).unwrap());
+let db = &database;
+let (sender, receiver) = mpsc::sync_channel(100);
+let db_cloned = database.clone();
+let update_fn = move |name: &str, update: ProcessedUpdateResult| {
+// try to open index to trigger a lock
+let _ = db_cloned.open_index(name);
+sender.send(update.update_id).unwrap()
+};
+// create the index
+let index = database.create_index("test").unwrap();
+database.set_update_callback(Box::new(update_fn));
+let schema = {
+let data = r#"
+identifier = "id"
+[attributes."name"]
+displayed = true
+indexed = true
+[attributes."description"]
+displayed = true
+indexed = true
+"#;
+toml::from_str(data).unwrap()
+};
+// add a schema to the index
+let mut writer = db.update_write_txn().unwrap();
+let _update_id = index.schema_update(&mut writer, schema).unwrap();
+writer.commit().unwrap();
+// add documents to the index
+let mut additions = index.documents_addition();
+let doc1 = serde_json::json!({
+"id": 123,
+"name": "Marvin",
+"description": "My name is Marvin",
+});
+let doc2 = serde_json::json!({
+"id": 234,
+"name": "Kevin",
+"description": "My name is Kevin",
+});
+additions.update_document(doc1);
+additions.update_document(doc2);
+let mut writer = db.update_write_txn().unwrap();
+let update_id = additions.finalize(&mut writer).unwrap();
+writer.commit().unwrap();
+// delete the index
let deleted = database.delete_index("test").unwrap();
assert!(deleted);
+// block until the transaction is processed
+let _ = receiver.into_iter().find(|id| *id == update_id);
let result = database.open_index("test");
assert!(result.is_none());
}
+#[test]
+fn check_number_ordering() {
+let dir = tempfile::tempdir().unwrap();
+let database = Database::open_or_create(dir.path()).unwrap();
+let db = &database;
+let (sender, receiver) = mpsc::sync_channel(100);
+let update_fn = move |_name: &str, update: ProcessedUpdateResult| {
+sender.send(update.update_id).unwrap()
+};
+let index = database.create_index("test").unwrap();
+database.set_update_callback(Box::new(update_fn));
+let schema = {
+let data = r#"
+identifier = "id"
+[attributes."name"]
+displayed = true
+indexed = true
+[attributes."release_date"]
+displayed = true
+ranked = true
+"#;
+toml::from_str(data).unwrap()
+};
+let mut writer = db.update_write_txn().unwrap();
+let _update_id = index.schema_update(&mut writer, schema).unwrap();
+writer.commit().unwrap();
+let mut additions = index.documents_addition();
+// DocumentId(7900334843754999545)
+let doc1 = serde_json::json!({
+"id": 123,
+"name": "Kevin the first",
+"release_date": -10000,
+});
+// DocumentId(8367468610878465872)
+let doc2 = serde_json::json!({
+"id": 234,
+"name": "Kevin the second",
+"release_date": 10000,
+});
+additions.update_document(doc1);
+additions.update_document(doc2);
+let mut writer = db.update_write_txn().unwrap();
+let update_id = additions.finalize(&mut writer).unwrap();
+writer.commit().unwrap();
+// block until the transaction is processed
+let _ = receiver.into_iter().find(|id| *id == update_id);
+let reader = db.main_read_txn().unwrap();
+let schema = index.main.schema(&reader).unwrap().unwrap();
+let ranked_map = index.main.ranked_map(&reader).unwrap().unwrap();
+let criteria = CriteriaBuilder::new()
+.add(
+criterion::SortByAttr::lower_is_better(&ranked_map, &schema, "release_date")
+.unwrap(),
+)
+.add(criterion::DocumentId)
+.build();
+let builder = index.query_builder_with_criteria(criteria);
+let results = builder.query(&reader, "Kevin", 0..20).unwrap();
+let mut iter = results.into_iter();
+assert_matches!(
+iter.next(),
+Some(Document {
+id: DocumentId(7900334843754999545),
+..
+})
+);
+assert_matches!(
+iter.next(),
+Some(Document {
+id: DocumentId(8367468610878465872),
+..
+})
+);
+assert_matches!(iter.next(), None);
+}
}

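With `env` made private and a second `update_env` added, callers now go through the four typed helpers shown above instead of opening transactions on `database.env` directly. A hedged usage sketch of that API, assuming an already-created `Database` and `Index` and keeping error handling minimal:

use meilisearch_core::{Database, Index, MResult};

fn read_then_enqueue(database: &Database, index: &Index) -> MResult<()> {
    // reads of documents, schema, etc. target the main environment
    let reader = database.main_read_txn()?;
    let _schema = index.main.schema(&reader)?;
    reader.abort();

    // enqueueing updates targets the separate update environment
    let mut update_writer = database.update_write_txn()?;
    // e.g. additions.finalize(&mut update_writer)? would go here
    update_writer.commit()?;
    Ok(())
}
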
View File

@ -18,14 +18,14 @@ pub mod serde;
pub mod store;
mod update;
-pub use self::database::{BoxUpdateFn, Database};
+pub use self::database::{BoxUpdateFn, Database, MainT, UpdateT};
pub use self::error::{Error, MResult};
pub use self::number::{Number, ParseNumberError};
pub use self::ranked_map::RankedMap;
pub use self::raw_document::RawDocument;
pub use self::store::Index;
pub use self::update::{EnqueuedUpdateResult, ProcessedUpdateResult, UpdateStatus, UpdateType};
-pub use meilidb_types::{DocIndex, DocumentId, Highlight};
+pub use meilisearch_types::{DocIndex, DocumentId, Highlight};
#[doc(hidden)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]

View File

@ -1,3 +1,4 @@
+use std::cmp::Ordering;
use std::fmt;
use std::num::{ParseFloatError, ParseIntError};
use std::str::FromStr;
@ -5,7 +6,7 @@ use std::str::FromStr;
use ordered_float::OrderedFloat;
use serde::{Deserialize, Serialize};
-#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Serialize, Deserialize, Debug, Copy, Clone, Hash)]
pub enum Number {
Unsigned(u64),
Signed(i64),
@ -39,6 +40,50 @@ impl FromStr for Number {
}
}
+impl PartialEq for Number {
+fn eq(&self, other: &Number) -> bool {
+self.cmp(other) == Ordering::Equal
+}
+}
+impl Eq for Number {}
+impl PartialOrd for Number {
+fn partial_cmp(&self, other: &Number) -> Option<Ordering> {
+Some(self.cmp(other))
+}
+}
+impl Ord for Number {
+fn cmp(&self, other: &Self) -> Ordering {
+use Number::{Float, Signed, Unsigned};
+match (*self, *other) {
+(Unsigned(a), Unsigned(b)) => a.cmp(&b),
+(Unsigned(a), Signed(b)) => {
+if b < 0 {
+Ordering::Greater
+} else {
+a.cmp(&(b as u64))
+}
+}
+(Unsigned(a), Float(b)) => (OrderedFloat(a as f64)).cmp(&b),
+(Signed(a), Unsigned(b)) => {
+if a < 0 {
+Ordering::Less
+} else {
+(a as u64).cmp(&b)
+}
+}
+(Signed(a), Signed(b)) => a.cmp(&b),
+(Signed(a), Float(b)) => OrderedFloat(a as f64).cmp(&b),
+(Float(a), Unsigned(b)) => a.cmp(&OrderedFloat(b as f64)),
+(Float(a), Signed(b)) => a.cmp(&OrderedFloat(b as f64)),
+(Float(a), Float(b)) => a.cmp(&b),
+}
+}
+}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParseNumberError {
uint_error: ParseIntError,

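The hand-written ordering above exists because the derived `Ord` compared the enum discriminants first, so every `Unsigned` sorted before every `Signed` regardless of value; the manual implementation compares numeric values across variants and makes `PartialEq` agree with it. A small illustration of the resulting behaviour, assuming the `Number` re-export from the crate root (the values are made up):

use std::cmp::Ordering;
use meilisearch_core::Number;

fn main() {
    // cross-variant comparisons now follow numeric value, not variant order
    assert_eq!(Number::Unsigned(10).cmp(&Number::Signed(-1)), Ordering::Greater);
    assert_eq!(Number::Signed(3).cmp(&Number::Float(3.0.into())), Ordering::Equal);
    assert!(Number::Float((-0.5).into()) < Number::Unsigned(0));
}
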
View File

@ -9,6 +9,7 @@ use fst::{IntoStreamer, Streamer};
use sdset::SetBuf;
use slice_group_by::{GroupBy, GroupByMut};
+use crate::database::MainT;
use crate::automaton::{Automaton, AutomatonGroup, AutomatonProducer, QueryEnhancer};
use crate::distinct_map::{BufferedDistinctMap, DistinctMap};
use crate::levenshtein::prefix_damerau_levenshtein;
@ -139,7 +140,7 @@ fn multiword_rewrite_matches(
}
fn fetch_raw_documents(
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
automatons_groups: &[AutomatonGroup],
query_enhancer: &QueryEnhancer,
searchables: Option<&ReorderedAttrs>,
@ -336,7 +337,7 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
pub fn query(
self,
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
) -> MResult<Vec<Document>> {
@ -374,7 +375,7 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
}
fn raw_query<'c, FI>(
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
@ -510,7 +511,7 @@ where
}
fn raw_query_with_distinct<'c, FI, FD>(
-reader: &heed::RoTxn,
+reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
@ -695,7 +696,7 @@ mod tests {
use std::iter::FromIterator;
use fst::{IntoStreamer, Set};
-use meilidb_schema::SchemaAttr;
+use meilisearch_schema::SchemaAttr;
use sdset::SetBuf;
use tempfile::TempDir;
@ -765,8 +766,8 @@ mod tests {
}
pub fn add_synonym(&mut self, word: &str, new: SetBuf<&str>) {
-let env = &self.database.env;
-let mut writer = env.write_txn().unwrap();
+let db = &self.database;
+let mut writer = db.main_write_txn().unwrap();
let word = word.to_lowercase();
@ -809,8 +810,8 @@ mod tests {
let database = Database::open_or_create(&tempdir).unwrap();
let index = database.create_index("default").unwrap();
-let env = &database.env;
-let mut writer = env.write_txn().unwrap();
+let db = &database;
+let mut writer = db.main_write_txn().unwrap();
let mut words_fst = BTreeSet::new();
let mut postings_lists = HashMap::new();
@ -872,8 +873,8 @@ mod tests {
("apple", &[doc_char_index(0, 2, 2)][..]),
]);
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "iphone from apple", 0..20).unwrap();
@ -895,8 +896,8 @@ mod tests {
store.add_synonym("bonjour", SetBuf::from_dirty(vec!["hello"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "hello", 0..20).unwrap();
@ -928,8 +929,8 @@ mod tests {
store.add_synonym("bonjour", SetBuf::from_dirty(vec!["hello"]));
store.add_synonym("salut", SetBuf::from_dirty(vec!["hello"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "sal", 0..20).unwrap();
@ -972,8 +973,8 @@ mod tests {
store.add_synonym("salutation", SetBuf::from_dirty(vec!["hello"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "salutution", 0..20).unwrap();
@ -1010,8 +1011,8 @@ mod tests {
store.add_synonym("bonjour", SetBuf::from_dirty(vec!["hello", "salut"]));
store.add_synonym("salut", SetBuf::from_dirty(vec!["hello", "bonjour"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "hello", 0..20).unwrap();
@ -1098,8 +1099,8 @@ mod tests {
SetBuf::from_dirty(vec!["NY", "new york", "new york city"]),
);
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "NY subway", 0..20).unwrap();
@ -1168,8 +1169,8 @@ mod tests {
store.add_synonym("NY", SetBuf::from_dirty(vec!["york new"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "NY", 0..20).unwrap();
@ -1226,8 +1227,8 @@ mod tests {
store.add_synonym("new york", SetBuf::from_dirty(vec!["NY"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "NY subway", 0..20).unwrap();
@ -1291,8 +1292,8 @@ mod tests {
SetBuf::from_dirty(vec!["NY", "new york", "new york city"]),
);
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "NY subway", 0..20).unwrap();
@ -1372,8 +1373,8 @@ mod tests {
);
store.add_synonym("subway", SetBuf::from_dirty(vec!["underground train"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "NY subway broken", 0..20).unwrap();
@ -1459,8 +1460,8 @@ mod tests {
);
store.add_synonym("underground train", SetBuf::from_dirty(vec!["subway"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder
@ -1559,8 +1560,8 @@ mod tests {
store.add_synonym("new york", SetBuf::from_dirty(vec!["new york city"]));
store.add_synonym("new york city", SetBuf::from_dirty(vec!["new york"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "new york big ", 0..20).unwrap();
@ -1596,8 +1597,8 @@ mod tests {
store.add_synonym("NY", SetBuf::from_dirty(vec!["new york city story"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder.query(&reader, "NY subway ", 0..20).unwrap();
@ -1646,8 +1647,8 @@ mod tests {
store.add_synonym("new york city", SetBuf::from_dirty(vec!["NYC"]));
store.add_synonym("subway", SetBuf::from_dirty(vec!["underground train"]));
-let env = &store.database.env;
-let reader = env.read_txn().unwrap();
+let db = &store.database;
+let reader = db.main_read_txn().unwrap();
let builder = store.query_builder();
let results = builder
@ -1672,15 +1673,15 @@ mod tests {
#[test]
fn deunicoded_synonyms() {
let mut store = TempDatabase::from_iter(vec![
-("telephone", &[doc_index(0, 0)][..]), // meilidb indexes the unidecoded
+("telephone", &[doc_index(0, 0)][..]), // meilisearch indexes the unidecoded
("téléphone", &[doc_index(0, 0)][..]), // and the original words on the same DocIndex
("iphone", &[doc_index(1, 0)][..]),
]); ]);
store.add_synonym("téléphone", SetBuf::from_dirty(vec!["iphone"])); store.add_synonym("téléphone", SetBuf::from_dirty(vec!["iphone"]));
let env = &store.database.env; let db = &store.database;
let reader = env.read_txn().unwrap(); let reader = db.main_read_txn().unwrap();
let builder = store.query_builder(); let builder = store.query_builder();
let results = builder.query(&reader, "telephone", 0..20).unwrap(); let results = builder.query(&reader, "telephone", 0..20).unwrap();
@ -1741,8 +1742,8 @@ mod tests {
("case", &[doc_index(0, 1)][..]), ("case", &[doc_index(0, 1)][..]),
]); ]);
let env = &store.database.env; let db = &store.database;
let reader = env.read_txn().unwrap(); let reader = db.main_read_txn().unwrap();
let builder = store.query_builder(); let builder = store.query_builder();
let results = builder.query(&reader, "i phone case", 0..20).unwrap(); let results = builder.query(&reader, "i phone case", 0..20).unwrap();
@ -1769,8 +1770,8 @@ mod tests {
("engine", &[doc_index(1, 2)][..]), ("engine", &[doc_index(1, 2)][..]),
]); ]);
let env = &store.database.env; let db = &store.database;
let reader = env.read_txn().unwrap(); let reader = db.main_read_txn().unwrap();
let builder = store.query_builder(); let builder = store.query_builder();
let results = builder.query(&reader, "searchengine", 0..20).unwrap(); let results = builder.query(&reader, "searchengine", 0..20).unwrap();
@ -1801,8 +1802,8 @@ mod tests {
("engine", &[doc_index(1, 3)][..]), ("engine", &[doc_index(1, 3)][..]),
]); ]);
let env = &store.database.env; let db = &store.database;
let reader = env.read_txn().unwrap(); let reader = db.main_read_txn().unwrap();
let builder = store.query_builder(); let builder = store.query_builder();
let results = builder.query(&reader, "searchengine", 0..20).unwrap(); let results = builder.query(&reader, "searchengine", 0..20).unwrap();
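The test changes above are mechanical, but they show the core idea of this patch: read transactions are now parameterized by the environment they belong to, so a transaction opened on the update database can no longer be handed to a main-database store. A minimal sketch of the marker-type pattern, with illustrative struct bodies rather than heed's real internals:

    use std::marker::PhantomData;

    // Zero-sized markers distinguishing the two LMDB environments.
    pub enum MainT {}
    pub enum UpdateT {}

    // Illustrative stand-in for heed's typed read transaction.
    pub struct RoTxn<T> {
        _marker: PhantomData<T>,
    }

    fn read_main(_reader: &RoTxn<MainT>) { /* main-database reads only */ }

    fn example(main_reader: RoTxn<MainT>, update_reader: RoTxn<UpdateT>) {
        read_main(&main_reader);
        // read_main(&update_reader); // rejected at compile time: wrong marker
        let _ = update_reader;
    }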


@@ -1,7 +1,7 @@
  use std::io::{Read, Write};
  use hashbrown::HashMap;
- use meilidb_schema::SchemaAttr;
+ use meilisearch_schema::SchemaAttr;
  use serde::{Deserialize, Serialize};
  use crate::{DocumentId, Number};


@@ -1,7 +1,7 @@
  use std::fmt;
  use std::sync::Arc;
- use meilidb_schema::SchemaAttr;
+ use meilisearch_schema::SchemaAttr;
  use sdset::SetBuf;
  use slice_group_by::GroupBy;


@@ -3,8 +3,8 @@ use std::convert::TryFrom;
  use crate::{DocIndex, DocumentId};
  use deunicode::deunicode_with_tofu;
- use meilidb_schema::SchemaAttr;
- use meilidb_tokenizer::{is_cjk, SeqTokenizer, Token, Tokenizer};
+ use meilisearch_schema::SchemaAttr;
+ use meilisearch_tokenizer::{is_cjk, SeqTokenizer, Token, Tokenizer};
  use sdset::SetBuf;
  const WORD_LENGTH_LIMIT: usize = 80;


@@ -2,12 +2,13 @@ use std::collections::HashSet;
  use std::io::Cursor;
  use std::{error::Error, fmt};
- use meilidb_schema::{Schema, SchemaAttr};
+ use meilisearch_schema::{Schema, SchemaAttr};
  use serde::{de, forward_to_deserialize_any};
  use serde_json::de::IoRead as SerdeJsonIoRead;
  use serde_json::Deserializer as SerdeJsonDeserializer;
  use serde_json::Error as SerdeJsonError;
+ use crate::database::MainT;
  use crate::store::DocumentsFields;
  use crate::DocumentId;
@@ -50,7 +51,7 @@ impl From<heed::Error> for DeserializerError {
  pub struct Deserializer<'a> {
  pub document_id: DocumentId,
- pub reader: &'a heed::RoTxn,
+ pub reader: &'a heed::RoTxn<MainT>,
  pub documents_fields: DocumentsFields,
  pub schema: &'a Schema,
  pub attributes: Option<&'a HashSet<SchemaAttr>>,


@@ -1,4 +1,4 @@
- use meilidb_schema::SchemaAttr;
+ use meilisearch_schema::SchemaAttr;
  use serde::ser;
  use serde::Serialize;


@@ -1,6 +1,7 @@
- use meilidb_schema::{Schema, SchemaAttr, SchemaProps};
+ use meilisearch_schema::{Schema, SchemaAttr, SchemaProps};
  use serde::ser;
+ use crate::database::MainT;
  use crate::raw_indexer::RawIndexer;
  use crate::store::{DocumentsFields, DocumentsFieldsCounts};
  use crate::{DocumentId, RankedMap};
@@ -8,7 +9,7 @@ use crate::{DocumentId, RankedMap};
  use super::{ConvertToNumber, ConvertToString, Indexer, SerializerError};
  pub struct Serializer<'a, 'b> {
- pub txn: &'a mut heed::RwTxn<'b>,
+ pub txn: &'a mut heed::RwTxn<'b, MainT>,
  pub schema: &'a Schema,
  pub document_store: DocumentsFields,
  pub document_fields_counts: DocumentsFieldsCounts,
@@ -191,7 +192,7 @@ impl<'a, 'b> ser::Serializer for Serializer<'a, 'b> {
  }
  pub struct MapSerializer<'a, 'b> {
- txn: &'a mut heed::RwTxn<'b>,
+ txn: &'a mut heed::RwTxn<'b, MainT>,
  schema: &'a Schema,
  document_id: DocumentId,
  document_store: DocumentsFields,
@@ -254,7 +255,7 @@ impl<'a, 'b> ser::SerializeMap for MapSerializer<'a, 'b> {
  }
  pub struct StructSerializer<'a, 'b> {
- txn: &'a mut heed::RwTxn<'b>,
+ txn: &'a mut heed::RwTxn<'b, MainT>,
  schema: &'a Schema,
  document_id: DocumentId,
  document_store: DocumentsFields,
@@ -297,7 +298,7 @@ impl<'a, 'b> ser::SerializeStruct for StructSerializer<'a, 'b> {
  }
  pub fn serialize_value<T: ?Sized>(
- txn: &mut heed::RwTxn,
+ txn: &mut heed::RwTxn<MainT>,
  attribute: SchemaAttr,
  props: SchemaProps,
  document_id: DocumentId,


@@ -1,4 +1,5 @@
  use super::BEU64;
+ use crate::database::MainT;
  use crate::DocumentId;
  use heed::types::{ByteSlice, OwnedType};
  use heed::Result as ZResult;
@@ -12,7 +13,7 @@ pub struct DocsWords {
  impl DocsWords {
  pub fn put_doc_words(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  document_id: DocumentId,
  words: &fst::Set,
  ) -> ZResult<()> {
@@ -21,18 +22,18 @@ impl DocsWords {
  self.docs_words.put(writer, &document_id, bytes)
  }
- pub fn del_doc_words(self, writer: &mut heed::RwTxn, document_id: DocumentId) -> ZResult<bool> {
+ pub fn del_doc_words(self, writer: &mut heed::RwTxn<MainT>, document_id: DocumentId) -> ZResult<bool> {
  let document_id = BEU64::new(document_id.0);
  self.docs_words.delete(writer, &document_id)
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
  self.docs_words.clear(writer)
  }
  pub fn doc_words(
  self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<MainT>,
  document_id: DocumentId,
  ) -> ZResult<Option<fst::Set>> {
  let document_id = BEU64::new(document_id.0);


@@ -1,6 +1,7 @@
  use heed::types::{ByteSlice, OwnedType};
+ use crate::database::MainT;
  use heed::Result as ZResult;
- use meilidb_schema::SchemaAttr;
+ use meilisearch_schema::SchemaAttr;
  use super::DocumentAttrKey;
  use crate::DocumentId;
@@ -13,7 +14,7 @@ pub struct DocumentsFields {
  impl DocumentsFields {
  pub fn put_document_field(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  document_id: DocumentId,
  attribute: SchemaAttr,
  value: &[u8],
@@ -24,7 +25,7 @@ impl DocumentsFields {
  pub fn del_all_document_fields(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  document_id: DocumentId,
  ) -> ZResult<usize> {
  let start = DocumentAttrKey::new(document_id, SchemaAttr::min());
@@ -32,13 +33,13 @@ impl DocumentsFields {
  self.documents_fields.delete_range(writer, &(start..=end))
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
  self.documents_fields.clear(writer)
  }
  pub fn document_attribute<'txn>(
  self,
- reader: &'txn heed::RoTxn,
+ reader: &'txn heed::RoTxn<MainT>,
  document_id: DocumentId,
  attribute: SchemaAttr,
  ) -> ZResult<Option<&'txn [u8]>> {
@@ -48,7 +49,7 @@ impl DocumentsFields {
  pub fn document_fields<'txn>(
  self,
- reader: &'txn heed::RoTxn,
+ reader: &'txn heed::RoTxn<MainT>,
  document_id: DocumentId,
  ) -> ZResult<DocumentFieldsIter<'txn>> {
  let start = DocumentAttrKey::new(document_id, SchemaAttr::min());


@@ -1,8 +1,9 @@
  use super::DocumentAttrKey;
+ use crate::database::MainT;
  use crate::DocumentId;
  use heed::types::OwnedType;
  use heed::Result as ZResult;
- use meilidb_schema::SchemaAttr;
+ use meilisearch_schema::SchemaAttr;
  #[derive(Copy, Clone)]
  pub struct DocumentsFieldsCounts {
@@ -12,7 +13,7 @@ pub struct DocumentsFieldsCounts {
  impl DocumentsFieldsCounts {
  pub fn put_document_field_count(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  document_id: DocumentId,
  attribute: SchemaAttr,
  value: u64,
@@ -23,7 +24,7 @@ impl DocumentsFieldsCounts {
  pub fn del_all_document_fields_counts(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  document_id: DocumentId,
  ) -> ZResult<usize> {
  let start = DocumentAttrKey::new(document_id, SchemaAttr::min());
@@ -32,13 +33,13 @@ impl DocumentsFieldsCounts {
  .delete_range(writer, &(start..=end))
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
  self.documents_fields_counts.clear(writer)
  }
  pub fn document_field_count(
  self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<MainT>,
  document_id: DocumentId,
  attribute: SchemaAttr,
  ) -> ZResult<Option<u64>> {
@@ -51,7 +52,7 @@ impl DocumentsFieldsCounts {
  pub fn document_fields_counts<'txn>(
  self,
- reader: &'txn heed::RoTxn,
+ reader: &'txn heed::RoTxn<MainT>,
  document_id: DocumentId,
  ) -> ZResult<DocumentFieldsCountsIter<'txn>> {
  let start = DocumentAttrKey::new(document_id, SchemaAttr::min());
@@ -60,7 +61,7 @@ impl DocumentsFieldsCounts {
  Ok(DocumentFieldsCountsIter { iter })
  }
- pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn) -> ZResult<DocumentsIdsIter<'txn>> {
+ pub fn documents_ids<'txn>(self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<DocumentsIdsIter<'txn>> {
  let iter = self.documents_fields_counts.iter(reader)?;
  Ok(DocumentsIdsIter {
  last_seen_id: None,
@@ -70,7 +71,7 @@ impl DocumentsFieldsCounts {
  pub fn all_documents_fields_counts<'txn>(
  self,
- reader: &'txn heed::RoTxn,
+ reader: &'txn heed::RoTxn<MainT>,
  ) -> ZResult<AllDocumentsFieldsCountsIter<'txn>> {
  let iter = self.documents_fields_counts.iter(reader)?;
  Ok(AllDocumentsFieldsCountsIter { iter })
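The per-document delete helpers above lean on the composite key layout: each entry is keyed by a (document id, attribute) pair, so one range delete removes every attribute of a single document. A sketch of that key construction, assuming SchemaAttr::max() mirrors the SchemaAttr::min() sentinel visible in the hunks:

    // Keys sort by (document_id, attr); the inclusive range selects exactly
    // one document's entries, whatever attributes it happens to have.
    let start = DocumentAttrKey::new(document_id, SchemaAttr::min());
    let end = DocumentAttrKey::new(document_id, SchemaAttr::max());
    let deleted = self.documents_fields_counts.delete_range(writer, &(start..=end))?;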


@@ -0,0 +1,184 @@
use crate::database::MainT;
use crate::RankedMap;
use chrono::{DateTime, Utc};
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
use heed::Result as ZResult;
use meilisearch_schema::Schema;
use std::collections::HashMap;
use std::sync::Arc;
const CREATED_AT_KEY: &str = "created-at";
const CUSTOMS_KEY: &str = "customs-key";
const FIELDS_FREQUENCY_KEY: &str = "fields-frequency";
const NAME_KEY: &str = "name";
const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents";
const RANKED_MAP_KEY: &str = "ranked-map";
const SCHEMA_KEY: &str = "schema";
const STOP_WORDS_KEY: &str = "stop-words";
const SYNONYMS_KEY: &str = "synonyms";
const UPDATED_AT_KEY: &str = "updated-at";
const WORDS_KEY: &str = "words";
pub type FreqsMap = HashMap<String, usize>;
type SerdeFreqsMap = SerdeBincode<FreqsMap>;
type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
#[derive(Copy, Clone)]
pub struct Main {
pub(crate) main: heed::PolyDatabase,
}
impl Main {
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
self.main.clear(writer)
}
pub fn put_name(self, writer: &mut heed::RwTxn<MainT>, name: &str) -> ZResult<()> {
self.main.put::<_, Str, Str>(writer, NAME_KEY, name)
}
pub fn name(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> {
Ok(self
.main
.get::<_, Str, Str>(reader, NAME_KEY)?
.map(|name| name.to_owned()))
}
pub fn put_created_at(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
self.main
.put::<_, Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now())
}
pub fn created_at(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<DateTime<Utc>>> {
self.main.get::<_, Str, SerdeDatetime>(reader, CREATED_AT_KEY)
}
pub fn put_updated_at(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
self.main
.put::<_, Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now())
}
pub fn updated_at(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<DateTime<Utc>>> {
self.main.get::<_, Str, SerdeDatetime>(reader, UPDATED_AT_KEY)
}
pub fn put_words_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes();
self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, bytes)
}
pub fn words_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<fst::Set>> {
match self.main.get::<_, Str, ByteSlice>(reader, WORDS_KEY)? {
Some(bytes) => {
let len = bytes.len();
let bytes = Arc::new(bytes.to_owned());
let fst = fst::raw::Fst::from_shared_bytes(bytes, 0, len).unwrap();
Ok(Some(fst::Set::from(fst)))
}
None => Ok(None),
}
}
pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> ZResult<()> {
self.main
.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)
}
pub fn schema(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Schema>> {
self.main
.get::<_, Str, SerdeBincode<Schema>>(reader, SCHEMA_KEY)
}
pub fn put_ranked_map(self, writer: &mut heed::RwTxn<MainT>, ranked_map: &RankedMap) -> ZResult<()> {
self.main
.put::<_, Str, SerdeBincode<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map)
}
pub fn ranked_map(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<RankedMap>> {
self.main
.get::<_, Str, SerdeBincode<RankedMap>>(reader, RANKED_MAP_KEY)
}
pub fn put_synonyms_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes();
self.main.put::<_, Str, ByteSlice>(writer, SYNONYMS_KEY, bytes)
}
pub fn synonyms_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<fst::Set>> {
match self.main.get::<_, Str, ByteSlice>(reader, SYNONYMS_KEY)? {
Some(bytes) => {
let len = bytes.len();
let bytes = Arc::new(bytes.to_owned());
let fst = fst::raw::Fst::from_shared_bytes(bytes, 0, len).unwrap();
Ok(Some(fst::Set::from(fst)))
}
None => Ok(None),
}
}
pub fn put_stop_words_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes();
self.main
.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)
}
pub fn stop_words_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<fst::Set>> {
match self.main.get::<_, Str, ByteSlice>(reader, STOP_WORDS_KEY)? {
Some(bytes) => {
let len = bytes.len();
let bytes = Arc::new(bytes.to_owned());
let fst = fst::raw::Fst::from_shared_bytes(bytes, 0, len).unwrap();
Ok(Some(fst::Set::from(fst)))
}
None => Ok(None),
}
}
pub fn put_number_of_documents<F>(self, writer: &mut heed::RwTxn<MainT>, f: F) -> ZResult<u64>
where
F: Fn(u64) -> u64,
{
let new = self.number_of_documents(&*writer).map(f)?;
self.main
.put::<_, Str, OwnedType<u64>>(writer, NUMBER_OF_DOCUMENTS_KEY, &new)?;
Ok(new)
}
pub fn number_of_documents(self, reader: &heed::RoTxn<MainT>) -> ZResult<u64> {
match self
.main
.get::<_, Str, OwnedType<u64>>(reader, NUMBER_OF_DOCUMENTS_KEY)?
{
Some(value) => Ok(value),
None => Ok(0),
}
}
pub fn put_fields_frequency(
self,
writer: &mut heed::RwTxn<MainT>,
fields_frequency: &FreqsMap,
) -> ZResult<()> {
self.main
.put::<_, Str, SerdeFreqsMap>(writer, FIELDS_FREQUENCY_KEY, fields_frequency)
}
pub fn fields_frequency(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<FreqsMap>> {
match self
.main
.get::<_, Str, SerdeFreqsMap>(reader, FIELDS_FREQUENCY_KEY)?
{
Some(freqs) => Ok(Some(freqs)),
None => Ok(None),
}
}
pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> ZResult<()> {
self.main
.put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs)
}
pub fn customs<'txn>(self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<&'txn [u8]>> {
self.main.get::<_, Str, ByteSlice>(reader, CUSTOMS_KEY)
}
}
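Several accessors in this new file persist an fst::Set as its raw bytes and rebuild it from an owned copy on read. A condensed sketch of that round trip (fst 0.3-era API, as used above; the unwraps match the file's own style):

    use std::sync::Arc;

    // Write side: an fst::Set is just its underlying byte buffer.
    let set = fst::Set::from_iter(vec!["bonjour", "hello"]).unwrap();
    let bytes = set.as_fst().as_bytes(); // stored through a ByteSlice database

    // Read side: copy the bytes out of the transaction, then rebuild the set.
    let shared = Arc::new(bytes.to_owned());
    let len = shared.len();
    let fst = fst::raw::Fst::from_shared_bytes(shared, 0, len).unwrap();
    let set = fst::Set::from(fst);
    assert!(set.contains("hello"));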


@@ -21,12 +21,13 @@ pub use self::updates_results::UpdatesResults;
  use std::collections::HashSet;
  use heed::Result as ZResult;
- use meilidb_schema::{Schema, SchemaAttr};
+ use meilisearch_schema::{Schema, SchemaAttr};
  use serde::de::{self, Deserialize};
  use zerocopy::{AsBytes, FromBytes};
  use crate::criterion::Criteria;
  use crate::database::{UpdateEvent, UpdateEventsEmitter};
+ use crate::database::{MainT, UpdateT};
  use crate::serde::Deserializer;
  use crate::{query_builder::QueryBuilder, update, DocumentId, Error, MResult};
@@ -92,13 +93,13 @@ pub struct Index {
  pub updates: Updates,
  pub updates_results: UpdatesResults,
- updates_notifier: UpdateEventsEmitter,
+ pub(crate) updates_notifier: UpdateEventsEmitter,
  }
  impl Index {
  pub fn document<T: de::DeserializeOwned>(
  &self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<MainT>,
  attributes: Option<&HashSet<&str>>,
  document_id: DocumentId,
  ) -> MResult<Option<T>> {
@@ -126,7 +127,7 @@ impl Index {
  pub fn document_attribute<T: de::DeserializeOwned>(
  &self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<MainT>,
  document_id: DocumentId,
  attribute: SchemaAttr,
  ) -> MResult<Option<T>> {
@@ -139,12 +140,12 @@ impl Index {
  }
  }
- pub fn schema_update(&self, writer: &mut heed::RwTxn, schema: Schema) -> MResult<u64> {
+ pub fn schema_update(&self, writer: &mut heed::RwTxn<UpdateT>, schema: Schema) -> MResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  update::push_schema_update(writer, self.updates, self.updates_results, schema)
  }
- pub fn customs_update(&self, writer: &mut heed::RwTxn, customs: Vec<u8>) -> ZResult<u64> {
+ pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  update::push_customs_update(writer, self.updates, self.updates_results, customs)
  }
@@ -173,7 +174,7 @@ impl Index {
  )
  }
- pub fn clear_all(&self, writer: &mut heed::RwTxn) -> MResult<u64> {
+ pub fn clear_all(&self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  update::push_clear_all(writer, self.updates, self.updates_results)
  }
@@ -210,8 +211,8 @@ impl Index {
  )
  }
- pub fn current_update_id(&self, reader: &heed::RoTxn) -> MResult<Option<u64>> {
- match self.updates.last_update_id(reader)? {
+ pub fn current_update_id(&self, reader: &heed::RoTxn<UpdateT>) -> MResult<Option<u64>> {
+ match self.updates.last_update(reader)? {
  Some((id, _)) => Ok(Some(id)),
  None => Ok(None),
  }
@@ -219,18 +220,18 @@ impl Index {
  pub fn update_status(
  &self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<UpdateT>,
  update_id: u64,
  ) -> MResult<Option<update::UpdateStatus>> {
  update::update_status(reader, self.updates, self.updates_results, update_id)
  }
- pub fn all_updates_status(&self, reader: &heed::RoTxn) -> MResult<Vec<update::UpdateStatus>> {
+ pub fn all_updates_status(&self, reader: &heed::RoTxn<UpdateT>) -> MResult<Vec<update::UpdateStatus>> {
  let mut updates = Vec::new();
  let mut last_update_result_id = 0;
  // retrieve all updates results
- if let Some((last_id, _)) = self.updates_results.last_update_id(reader)? {
+ if let Some((last_id, _)) = self.updates_results.last_update(reader)? {
  updates.reserve(last_id as usize);
  for id in 0..=last_id {
@@ -242,7 +243,7 @@ impl Index {
  }
  // retrieve all enqueued updates
- if let Some((last_id, _)) = self.updates.last_update_id(reader)? {
+ if let Some((last_id, _)) = self.updates.last_update(reader)? {
  for id in last_update_result_id + 1..=last_id {
  if let Some(update) = self.update_status(reader, id)? {
  updates.push(update);
@@ -278,6 +279,7 @@ impl Index {
  pub fn create(
  env: &heed::Env,
+ update_env: &heed::Env,
  name: &str,
  updates_notifier: UpdateEventsEmitter,
  ) -> MResult<Index> {
@@ -298,8 +300,8 @@ pub fn create(
  let documents_fields_counts = env.create_database(Some(&documents_fields_counts_name))?;
  let synonyms = env.create_database(Some(&synonyms_name))?;
  let docs_words = env.create_database(Some(&docs_words_name))?;
- let updates = env.create_database(Some(&updates_name))?;
- let updates_results = env.create_database(Some(&updates_results_name))?;
+ let updates = update_env.create_database(Some(&updates_name))?;
+ let updates_results = update_env.create_database(Some(&updates_results_name))?;
  Ok(Index {
  main: Main { main },
@@ -318,6 +320,7 @@ pub fn create(
  pub fn open(
  env: &heed::Env,
+ update_env: &heed::Env,
  name: &str,
  updates_notifier: UpdateEventsEmitter,
  ) -> MResult<Option<Index>> {
@@ -356,11 +359,11 @@ pub fn open(
  Some(docs_words) => docs_words,
  None => return Ok(None),
  };
- let updates = match env.open_database(Some(&updates_name))? {
+ let updates = match update_env.open_database(Some(&updates_name))? {
  Some(updates) => updates,
  None => return Ok(None),
  };
- let updates_results = match env.open_database(Some(&updates_results_name))? {
+ let updates_results = match update_env.open_database(Some(&updates_results_name))? {
  Some(updates_results) => updates_results,
  None => return Ok(None),
  };
@@ -380,10 +383,11 @@ pub fn open(
  }))
  }
- pub fn clear(writer: &mut heed::RwTxn, index: &Index) -> MResult<()> {
- // send a stop event to the update loop of the index
- index.updates_notifier.send(UpdateEvent::MustStop).unwrap();
+ pub fn clear(
+ writer: &mut heed::RwTxn<MainT>,
+ update_writer: &mut heed::RwTxn<UpdateT>,
+ index: &Index,
+ ) -> MResult<()> {
  // clear all the stores
  index.main.clear(writer)?;
  index.postings_lists.clear(writer)?;
@@ -391,7 +395,7 @@ pub fn clear(writer: &mut heed::RwTxn, index: &Index) -> MResult<()> {
  index.documents_fields_counts.clear(writer)?;
  index.synonyms.clear(writer)?;
  index.docs_words.clear(writer)?;
- index.updates.clear(writer)?;
- index.updates_results.clear(writer)?;
+ index.updates.clear(update_writer)?;
+ index.updates_results.clear(update_writer)?;
  Ok(())
  }
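With updates moved into their own environment, clearing an index now needs one write transaction per environment, as the new clear signature shows. A hypothetical caller might look like this; the main_write_txn/update_write_txn helpers are assumed, by analogy with the main_read_txn seen in the tests:

    // Both transactions must commit for the clear to be durable; committing
    // only one of them leaves the other environment untouched.
    let mut writer = db.main_write_txn()?;          // assumed helper on Database
    let mut update_writer = db.update_write_txn()?; // assumed helper on Database
    store::clear(&mut writer, &mut update_writer, &index)?;
    writer.commit()?;
    update_writer.commit()?;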


@@ -1,4 +1,5 @@
  use crate::DocIndex;
+ use crate::database::MainT;
  use heed::types::{ByteSlice, CowSlice};
  use heed::Result as ZResult;
  use sdset::{Set, SetBuf};
@@ -12,24 +13,24 @@ pub struct PostingsLists {
  impl PostingsLists {
  pub fn put_postings_list(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  word: &[u8],
  words_indexes: &Set<DocIndex>,
  ) -> ZResult<()> {
  self.postings_lists.put(writer, word, words_indexes)
  }
- pub fn del_postings_list(self, writer: &mut heed::RwTxn, word: &[u8]) -> ZResult<bool> {
+ pub fn del_postings_list(self, writer: &mut heed::RwTxn<MainT>, word: &[u8]) -> ZResult<bool> {
  self.postings_lists.delete(writer, word)
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
  self.postings_lists.clear(writer)
  }
  pub fn postings_list<'txn>(
  self,
- reader: &'txn heed::RoTxn,
+ reader: &'txn heed::RoTxn<MainT>,
  word: &[u8],
  ) -> ZResult<Option<Cow<'txn, Set<DocIndex>>>> {
  match self.postings_lists.get(reader, word)? {


@@ -1,4 +1,5 @@
  use heed::types::ByteSlice;
+ use crate::database::MainT;
  use heed::Result as ZResult;
  use std::sync::Arc;
@@ -10,7 +11,7 @@ pub struct Synonyms {
  impl Synonyms {
  pub fn put_synonyms(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  word: &[u8],
  synonyms: &fst::Set,
  ) -> ZResult<()> {
@@ -18,15 +19,15 @@ impl Synonyms {
  self.synonyms.put(writer, word, bytes)
  }
- pub fn del_synonyms(self, writer: &mut heed::RwTxn, word: &[u8]) -> ZResult<bool> {
+ pub fn del_synonyms(self, writer: &mut heed::RwTxn<MainT>, word: &[u8]) -> ZResult<bool> {
  self.synonyms.delete(writer, word)
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
  self.synonyms.clear(writer)
  }
- pub fn synonyms(self, reader: &heed::RoTxn, word: &[u8]) -> ZResult<Option<fst::Set>> {
+ pub fn synonyms(self, reader: &heed::RoTxn<MainT>, word: &[u8]) -> ZResult<Option<fst::Set>> {
  match self.synonyms.get(reader, word)? {
  Some(bytes) => {
  let len = bytes.len();


@@ -1,4 +1,5 @@
  use super::BEU64;
+ use crate::database::UpdateT;
  use crate::update::Update;
  use heed::types::{OwnedType, SerdeJson};
  use heed::Result as ZResult;
@@ -10,7 +11,7 @@ pub struct Updates {
  impl Updates {
  // TODO do not trigger deserialize if possible
- pub fn last_update_id(self, reader: &heed::RoTxn) -> ZResult<Option<(u64, Update)>> {
+ pub fn last_update(self, reader: &heed::RoTxn<UpdateT>) -> ZResult<Option<(u64, Update)>> {
  match self.updates.last(reader)? {
  Some((key, data)) => Ok(Some((key.get(), data))),
  None => Ok(None),
@@ -18,7 +19,7 @@ impl Updates {
  }
  // TODO do not trigger deserialize if possible
- fn first_update_id(self, reader: &heed::RoTxn) -> ZResult<Option<(u64, Update)>> {
+ pub fn first_update(self, reader: &heed::RoTxn<UpdateT>) -> ZResult<Option<(u64, Update)>> {
  match self.updates.first(reader)? {
  Some((key, data)) => Ok(Some((key.get(), data))),
  None => Ok(None),
@@ -26,14 +27,14 @@ impl Updates {
  }
  // TODO do not trigger deserialize if possible
- pub fn get(self, reader: &heed::RoTxn, update_id: u64) -> ZResult<Option<Update>> {
+ pub fn get(self, reader: &heed::RoTxn<UpdateT>, update_id: u64) -> ZResult<Option<Update>> {
  let update_id = BEU64::new(update_id);
  self.updates.get(reader, &update_id)
  }
  pub fn put_update(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  update_id: u64,
  update: &Update,
  ) -> ZResult<()> {
@@ -42,8 +43,13 @@ impl Updates {
  self.updates.put(writer, &update_id, update)
  }
- pub fn pop_front(self, writer: &mut heed::RwTxn) -> ZResult<Option<(u64, Update)>> {
- match self.first_update_id(writer)? {
+ pub fn del_update(self, writer: &mut heed::RwTxn<UpdateT>, update_id: u64) -> ZResult<bool> {
+ let update_id = BEU64::new(update_id);
+ self.updates.delete(writer, &update_id)
+ }
+ pub fn pop_front(self, writer: &mut heed::RwTxn<UpdateT>) -> ZResult<Option<(u64, Update)>> {
+ match self.first_update(writer)? {
  Some((update_id, update)) => {
  let key = BEU64::new(update_id);
  self.updates.delete(writer, &key)?;
@@ -53,7 +59,7 @@ impl Updates {
  }
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<UpdateT>) -> ZResult<()> {
  self.updates.clear(writer)
  }
  }
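The queue semantics of first_update/last_update/pop_front rest on the key encoding: BEU64 stores ids big-endian, and LMDB orders keys byte-lexicographically, so byte order coincides with numeric order and first/last mean oldest/newest. A small check of that property:

    // Big-endian bytes sort like the numbers they encode; little-endian would not.
    let one = 1u64;
    let big = 256u64;
    assert!(one.to_be_bytes() < big.to_be_bytes()); // numeric order preserved
    assert!(one.to_le_bytes() > big.to_le_bytes()); // little-endian misorders them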


@@ -1,4 +1,5 @@
  use super::BEU64;
+ use crate::database::UpdateT;
  use crate::update::ProcessedUpdateResult;
  use heed::types::{OwnedType, SerdeJson};
  use heed::Result as ZResult;
@@ -9,9 +10,9 @@ pub struct UpdatesResults {
  }
  impl UpdatesResults {
- pub fn last_update_id(
+ pub fn last_update(
  self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<UpdateT>,
  ) -> ZResult<Option<(u64, ProcessedUpdateResult)>> {
  match self.updates_results.last(reader)? {
  Some((key, data)) => Ok(Some((key.get(), data))),
@@ -21,7 +22,7 @@ impl UpdatesResults {
  pub fn put_update_result(
  self,
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  update_id: u64,
  update_result: &ProcessedUpdateResult,
  ) -> ZResult<()> {
@@ -31,14 +32,14 @@ impl UpdatesResults {
  pub fn update_result(
  self,
- reader: &heed::RoTxn,
+ reader: &heed::RoTxn<UpdateT>,
  update_id: u64,
  ) -> ZResult<Option<ProcessedUpdateResult>> {
  let update_id = BEU64::new(update_id);
  self.updates_results.get(reader, &update_id)
  }
- pub fn clear(self, writer: &mut heed::RwTxn) -> ZResult<()> {
+ pub fn clear(self, writer: &mut heed::RwTxn<UpdateT>) -> ZResult<()> {
  self.updates_results.clear(writer)
  }
  }


@@ -1,8 +1,9 @@
+ use crate::database::{MainT, UpdateT};
  use crate::update::{next_update_id, Update};
  use crate::{store, MResult, RankedMap};
  pub fn apply_clear_all(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
  documents_fields_counts_store: store::DocumentsFieldsCounts,
@@ -21,7 +22,7 @@ pub fn apply_clear_all(
  }
  pub fn push_clear_all(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  ) -> MResult<u64> {


@@ -1,9 +1,11 @@
- use crate::store;
- use crate::update::{next_update_id, Update};
  use heed::Result as ZResult;
+ use crate::database::{MainT, UpdateT};
+ use crate::store;
+ use crate::update::{next_update_id, Update};
  pub fn apply_customs_update(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  customs: &[u8],
  ) -> ZResult<()> {
@@ -11,7 +13,7 @@ pub fn apply_customs_update(
  }
  pub fn push_customs_update(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  customs: Vec<u8>,


@@ -4,6 +4,7 @@ use fst::{set::OpBuilder, SetBuilder};
  use sdset::{duo::Union, SetOperation};
  use serde::{Deserialize, Serialize};
+ use crate::database::{MainT, UpdateT};
  use crate::database::{UpdateEvent, UpdateEventsEmitter};
  use crate::raw_indexer::RawIndexer;
  use crate::serde::{extract_document_id, serialize_value, Deserializer, Serializer};
@@ -52,7 +53,7 @@ impl<D> DocumentsAddition<D> {
  self.documents.push(document);
  }
- pub fn finalize(self, writer: &mut heed::RwTxn) -> MResult<u64>
+ pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64>
  where
  D: serde::Serialize,
  {
@@ -75,7 +76,7 @@ impl<D> Extend<D> for DocumentsAddition<D> {
  }
  pub fn push_documents_addition<D: serde::Serialize>(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  addition: Vec<D>,
@@ -102,7 +103,7 @@ pub fn push_documents_addition<D: serde::Serialize>(
  }
  pub fn apply_documents_addition<'a, 'b>(
- writer: &'a mut heed::RwTxn<'b>,
+ writer: &'a mut heed::RwTxn<'b, MainT>,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
  documents_fields_counts_store: store::DocumentsFieldsCounts,
@@ -181,7 +182,7 @@ pub fn apply_documents_addition<'a, 'b>(
  }
  pub fn apply_documents_partial_addition<'a, 'b>(
- writer: &'a mut heed::RwTxn<'b>,
+ writer: &'a mut heed::RwTxn<'b, MainT>,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
  documents_fields_counts_store: store::DocumentsFieldsCounts,
@@ -277,7 +278,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
  }
  pub fn reindex_all_documents(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
  documents_fields_counts_store: store::DocumentsFieldsCounts,
@@ -354,7 +355,7 @@ pub fn reindex_all_documents(
  }
  pub fn write_documents_addition_index(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  postings_lists_store: store::PostingsLists,
  docs_words_store: store::DocsWords,


@@ -1,9 +1,10 @@
  use std::collections::{BTreeSet, HashMap, HashSet};
  use fst::{SetBuilder, Streamer};
- use meilidb_schema::Schema;
+ use meilisearch_schema::Schema;
  use sdset::{duo::DifferenceByKey, SetBuf, SetOperation};
+ use crate::database::{MainT, UpdateT};
  use crate::database::{UpdateEvent, UpdateEventsEmitter};
  use crate::serde::extract_document_id;
  use crate::store;
@@ -50,7 +51,7 @@ impl DocumentsDeletion {
  Ok(())
  }
- pub fn finalize(self, writer: &mut heed::RwTxn) -> MResult<u64> {
+ pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  let update_id = push_documents_deletion(
  writer,
@@ -69,7 +70,7 @@ impl Extend<DocumentId> for DocumentsDeletion {
  }
  pub fn push_documents_deletion(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  deletion: Vec<DocumentId>,
@@ -83,7 +84,7 @@ pub fn push_documents_deletion(
  }
  pub fn apply_documents_deletion(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
  documents_fields_counts_store: store::DocumentsFieldsCounts,


@@ -30,7 +30,8 @@ use log::debug;
  use serde::{Deserialize, Serialize};
  use crate::{store, DocumentId, MResult};
- use meilidb_schema::Schema;
+ use crate::database::{MainT, UpdateT};
+ use meilisearch_schema::Schema;
  #[derive(Debug, Clone, Serialize, Deserialize)]
  pub struct Update {
@@ -203,14 +204,14 @@ pub enum UpdateStatus {
  }
  pub fn update_status(
- reader: &heed::RoTxn,
+ update_reader: &heed::RoTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  update_id: u64,
  ) -> MResult<Option<UpdateStatus>> {
- match updates_results_store.update_result(reader, update_id)? {
+ match updates_results_store.update_result(update_reader, update_id)? {
  Some(result) => Ok(Some(UpdateStatus::Processed { content: result })),
- None => match updates_store.get(reader, update_id)? {
+ None => match updates_store.get(update_reader, update_id)? {
  Some(update) => Ok(Some(UpdateStatus::Enqueued {
  content: EnqueuedUpdateResult {
  update_id,
@@ -224,25 +225,25 @@ pub fn update_status(
  }
  pub fn next_update_id(
- writer: &mut heed::RwTxn,
+ update_writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  ) -> ZResult<u64> {
- let last_update_id = updates_store.last_update_id(writer)?;
- let last_update_id = last_update_id.map(|(n, _)| n);
- let last_update_results_id = updates_results_store.last_update_id(writer)?;
+ let last_update = updates_store.last_update(update_writer)?;
+ let last_update = last_update.map(|(n, _)| n);
+ let last_update_results_id = updates_results_store.last_update(update_writer)?;
  let last_update_results_id = last_update_results_id.map(|(n, _)| n);
- let max_update_id = cmp::max(last_update_id, last_update_results_id);
+ let max_update_id = cmp::max(last_update, last_update_results_id);
  let new_update_id = max_update_id.map_or(0, |n| n + 1);
  Ok(new_update_id)
  }
  pub fn update_task<'a, 'b>(
- writer: &'a mut heed::RwTxn<'b>,
- index: store::Index,
+ writer: &'a mut heed::RwTxn<'b, MainT>,
+ index: &store::Index,
  update_id: u64,
  update: Update,
  ) -> MResult<ProcessedUpdateResult> {
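next_update_id takes the max over both trees because an id must never be reused: processed results live in one tree, still-enqueued updates in the other, and either may hold the highest id at any moment. A minimal model with plain Options (None sorts below Some(_) under Option's derived ordering):

    use std::cmp;

    let last_enqueued: Option<u64> = Some(7);  // newest id still in the queue
    let last_processed: Option<u64> = Some(9); // newest id already processed
    let next = cmp::max(last_enqueued, last_processed).map_or(0, |n| n + 1);
    assert_eq!(next, 10);

    // A fresh index has neither, so ids start at zero.
    let next = cmp::max(None::<u64>, None).map_or(0, |n| n + 1);
    assert_eq!(next, 0);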


@@ -1,11 +1,12 @@
- use meilidb_schema::{Diff, Schema};
+ use meilisearch_schema::{Diff, Schema};
+ use crate::database::{MainT, UpdateT};
  use crate::update::documents_addition::reindex_all_documents;
  use crate::update::{next_update_id, Update};
  use crate::{error::UnsupportedOperation, store, MResult};
  pub fn apply_schema_update(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  new_schema: &Schema,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
@@ -21,7 +22,7 @@ pub fn apply_schema_update(
  let mut need_full_reindexing = false;
  if let Some(old_schema) = main_store.schema(writer)? {
- for diff in meilidb_schema::diff(&old_schema, new_schema) {
+ for diff in meilisearch_schema::diff(&old_schema, new_schema) {
  match diff {
  Diff::IdentChange { .. } => return Err(CannotUpdateSchemaIdentifier.into()),
  Diff::AttrMove { .. } => return Err(CannotReorderSchemaAttribute.into()),
@@ -61,7 +62,7 @@ pub fn apply_schema_update(
  }
  pub fn push_schema_update(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  schema: Schema,


@@ -2,6 +2,7 @@ use std::collections::BTreeSet;
  use fst::{set::OpBuilder, SetBuilder};
+ use crate::database::{MainT, UpdateT};
  use crate::automaton::normalize_str;
  use crate::database::{UpdateEvent, UpdateEventsEmitter};
  use crate::update::{next_update_id, Update};
@@ -33,7 +34,7 @@ impl StopWordsAddition {
  self.stop_words.insert(stop_word);
  }
- pub fn finalize(self, writer: &mut heed::RwTxn) -> MResult<u64> {
+ pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  let update_id = push_stop_words_addition(
  writer,
@@ -46,7 +47,7 @@ impl StopWordsAddition {
  }
  pub fn push_stop_words_addition(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  addition: BTreeSet<String>,
@@ -60,7 +61,7 @@ pub fn push_stop_words_addition(
  }
  pub fn apply_stop_words_addition(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  postings_lists_store: store::PostingsLists,
  addition: BTreeSet<String>,


@@ -2,6 +2,7 @@ use std::collections::BTreeSet;
  use fst::{set::OpBuilder, SetBuilder};
+ use crate::database::{MainT, UpdateT};
  use crate::automaton::normalize_str;
  use crate::database::{UpdateEvent, UpdateEventsEmitter};
  use crate::update::documents_addition::reindex_all_documents;
@@ -34,7 +35,7 @@ impl StopWordsDeletion {
  self.stop_words.insert(stop_word);
  }
- pub fn finalize(self, writer: &mut heed::RwTxn) -> MResult<u64> {
+ pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  let update_id = push_stop_words_deletion(
  writer,
@@ -47,7 +48,7 @@ impl StopWordsDeletion {
  }
  pub fn push_stop_words_deletion(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  deletion: BTreeSet<String>,
@@ -61,7 +62,7 @@ pub fn push_stop_words_deletion(
  }
  pub fn apply_stop_words_deletion(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  documents_fields_store: store::DocumentsFields,
  documents_fields_counts_store: store::DocumentsFieldsCounts,


@@ -3,6 +3,7 @@ use std::collections::BTreeMap;
  use fst::{set::OpBuilder, SetBuilder};
  use sdset::SetBuf;
+ use crate::database::{MainT, UpdateT};
  use crate::automaton::normalize_str;
  use crate::database::{UpdateEvent, UpdateEventsEmitter};
  use crate::update::{next_update_id, Update};
@@ -43,7 +44,7 @@ impl SynonymsAddition {
  .extend(alternatives);
  }
- pub fn finalize(self, writer: &mut heed::RwTxn) -> MResult<u64> {
+ pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
  let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
  let update_id = push_synonyms_addition(
  writer,
@@ -56,7 +57,7 @@ impl SynonymsAddition {
  }
  pub fn push_synonyms_addition(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<UpdateT>,
  updates_store: store::Updates,
  updates_results_store: store::UpdatesResults,
  addition: BTreeMap<String, Vec<String>>,
@@ -70,7 +71,7 @@ pub fn push_synonyms_addition(
  }
  pub fn apply_synonyms_addition(
- writer: &mut heed::RwTxn,
+ writer: &mut heed::RwTxn<MainT>,
  main_store: store::Main,
  synonyms_store: store::Synonyms,
  addition: BTreeMap<String, Vec<String>>,


@ -4,6 +4,7 @@ use std::iter::FromIterator;
use fst::{set::OpBuilder, SetBuilder}; use fst::{set::OpBuilder, SetBuilder};
use sdset::SetBuf; use sdset::SetBuf;
use crate::database::{MainT, UpdateT};
use crate::automaton::normalize_str; use crate::automaton::normalize_str;
use crate::database::{UpdateEvent, UpdateEventsEmitter}; use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::update::{next_update_id, Update}; use crate::update::{next_update_id, Update};
@ -50,7 +51,7 @@ impl SynonymsDeletion {
} }
} }
pub fn finalize(self, writer: &mut heed::RwTxn) -> MResult<u64> { pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
let update_id = push_synonyms_deletion( let update_id = push_synonyms_deletion(
writer, writer,
@ -63,7 +64,7 @@ impl SynonymsDeletion {
} }
pub fn push_synonyms_deletion( pub fn push_synonyms_deletion(
writer: &mut heed::RwTxn, writer: &mut heed::RwTxn<UpdateT>,
updates_store: store::Updates, updates_store: store::Updates,
updates_results_store: store::UpdatesResults, updates_results_store: store::UpdatesResults,
deletion: BTreeMap<String, Option<Vec<String>>>, deletion: BTreeMap<String, Option<Vec<String>>>,
@ -77,7 +78,7 @@ pub fn push_synonyms_deletion(
} }
pub fn apply_synonyms_deletion( pub fn apply_synonyms_deletion(
writer: &mut heed::RwTxn, writer: &mut heed::RwTxn<MainT>,
main_store: store::Main, main_store: store::Main,
synonyms_store: store::Synonyms, synonyms_store: store::Synonyms,
deletion: BTreeMap<String, Option<Vec<String>>>, deletion: BTreeMap<String, Option<Vec<String>>>,
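The hunks above all make the same change: heed read and write transactions gain a type parameter (RwTxn<MainT> or RwTxn<UpdateT>), so a function that touches the update queue can no longer be handed a main-database transaction, and vice versa. A minimal sketch of the underlying phantom-type pattern, with hypothetical marker and function names — this shows the general technique, not heed's actual implementation:

use std::marker::PhantomData;

// Marker types playing the role of the `MainT` / `UpdateT` markers
// imported from `crate::database` in the hunks above.
struct MainT;
struct UpdateT;

// A write transaction tagged with the environment it belongs to.
// The marker costs nothing at runtime; it only lives in the types.
struct RwTxn<T> {
    _marker: PhantomData<T>,
}

impl<T> RwTxn<T> {
    fn new() -> RwTxn<T> {
        RwTxn { _marker: PhantomData }
    }
}

// Like `push_synonyms_addition` above, this only accepts an
// update-database transaction; passing a `RwTxn<MainT>` becomes a
// compile-time error instead of a runtime mix-up.
fn push_update(_writer: &mut RwTxn<UpdateT>) {}

fn main() {
    let mut update_txn = RwTxn::<UpdateT>::new();
    push_update(&mut update_txn);
    // let mut main_txn = RwTxn::<MainT>::new();
    // push_update(&mut main_txn); // does not compile
}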

View File

@ -1,35 +1,40 @@
[package] [package]
name = "meilidb-http" name = "meilisearch-http"
version = "0.2.0" version = "0.8.2"
authors = [ authors = [
"Quentin de Quelen <quentin@dequelen.me>", "Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>", "Clément Renault <clement@meilisearch.com>",
] ]
edition = "2018" edition = "2018"
[[bin]]
name = "meilisearch"
path = "src/main.rs"
[dependencies] [dependencies]
bincode = "1.2.0" bincode = "1.2.0"
chrono = { version = "0.4.9", features = ["serde"] } chrono = { version = "0.4.9", features = ["serde"] }
crossbeam-channel = "0.4.0" crossbeam-channel = "0.4.0"
env_logger = "0.7.1" env_logger = "0.7.1"
envconfig = "0.5.1" heed = "0.6.0"
envconfig_derive = "0.5.1"
heed = "0.5.0"
http = "0.1.19" http = "0.1.19"
indexmap = { version = "1.3.0", features = ["serde-1"] } indexmap = { version = "1.3.0", features = ["serde-1"] }
jemallocator = "0.3.2"
log = "0.4.8" log = "0.4.8"
main_error = "0.1.0" main_error = "0.1.0"
meilidb-core = { path = "../meilidb-core", version = "0.7.0" } meilisearch-core = { path = "../meilisearch-core", version = "0.8.2" }
meilidb-schema = { path = "../meilidb-schema", version = "0.6.0" } meilisearch-schema = { path = "../meilisearch-schema", version = "0.8.2" }
pretty-bytes = "0.2.2" pretty-bytes = "0.2.2"
rand = "0.7.2" rand = "0.7.2"
rayon = "1.2.0" rayon = "1.2.0"
serde = { version = "1.0.101", features = ["derive"] } serde = { version = "1.0.101", features = ["derive"] }
serde_json = { version = "1.0.41", features = ["preserve_order"] } serde_json = { version = "1.0.41", features = ["preserve_order"] }
serde_qs = "0.5.1"
siphasher = "0.3.1"
structopt = "0.3.3" structopt = "0.3.3"
sysinfo = "0.9.5" sysinfo = "0.9.5"
ureq = { version = "0.11.2", features = ["tls"], default-features = false }
walkdir = "2.2.9" walkdir = "2.2.9"
whoami = "0.6"
[dependencies.async-compression] [dependencies.async-compression]
default-features = false default-features = false
@ -54,3 +59,6 @@ rev = "e77709370bb24cf776fe6da902467c35131535b1"
[build-dependencies] [build-dependencies]
vergen = "3.0.4" vergen = "3.0.4"
[target.'cfg(unix)'.dependencies]
jemallocator = "0.3.2"
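Note the split between the manifest and the code: the dependency above is gated on cfg(unix), while the allocator is only installed on Linux (see the #[cfg(target_os = "linux")] attribute in the src/main.rs hunk further down). A small illustration of that pairing — on macOS, for example, the crate is compiled but the system allocator stays in place:

// Built wherever the `cfg(unix)` dependency above is available, but
// only promoted to the global allocator on Linux, matching the
// attribute used in src/main.rs below.
#[cfg(target_os = "linux")]
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;

fn main() {
    // On Linux this Vec is served by jemalloc; elsewhere the
    // platform's default allocator handles it.
    let numbers = vec![1, 2, 3];
    println!("{:?}", numbers);
}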

View File

@ -0,0 +1,69 @@
use std::hash::{Hash, Hasher};
use std::thread;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use log::error;
use serde::Serialize;
use serde_qs as qs;
use siphasher::sip::SipHasher;
const AMPLITUDE_API_KEY: &str = "f7fba398780e06d8fe6666a9be7e3d47";
#[derive(Debug, Serialize)]
struct Event<'a> {
user_id: &'a str,
event_type: &'a str,
device_id: &'a str,
time: u64,
}
#[derive(Debug, Serialize)]
struct AmplitudeRequest<'a> {
api_key: &'a str,
event: &'a str,
}
pub fn analytics_sender() {
let username = whoami::username();
let hostname = whoami::hostname();
let platform = whoami::platform();
let uid = username + &hostname + &platform.to_string();
let mut hasher = SipHasher::new();
uid.hash(&mut hasher);
let hash = hasher.finish();
let uid = format!("{:X}", hash);
let platform = platform.to_string();
loop {
let n = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
let user_id = &uid;
let device_id = &platform;
let time = n.as_secs();
let event_type = "runtime_tick";
let event = Event {
user_id,
event_type,
device_id,
time,
};
let event = serde_json::to_string(&event).unwrap();
let request = AmplitudeRequest {
api_key: AMPLITUDE_API_KEY,
event: &event,
};
let body = qs::to_string(&request).unwrap();
let response = ureq::post("https://api.amplitude.com/httpapi").send_string(&body);
if !response.ok() {
let body = response.into_string().unwrap();
error!("Unsuccessful call to Amplitude: {}", body);
}
thread::sleep(Duration::from_secs(86_400)) // one day
}
}
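The sender above never transmits the username or hostname themselves: they are folded through SipHash into an opaque device id. A standalone sketch of just that derivation, with hypothetical machine values in place of the `whoami` calls:

use std::hash::{Hash, Hasher};

use siphasher::sip::SipHasher;

fn main() {
    // Hypothetical values; the real code reads them via `whoami`.
    let uid = format!("{}{}{}", "alice", "devbox", "Linux");

    // Same machine, same hash: the id is stable but not reversible.
    let mut hasher = SipHasher::new();
    uid.hash(&mut hasher);
    println!("anonymous id: {:X}", hasher.finish());
}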

View File

@ -4,15 +4,15 @@ use std::sync::Arc;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use heed::types::{SerdeBincode, Str}; use heed::types::{SerdeBincode, Str};
use log::*; use log::error;
use meilidb_core::{Database, MResult}; use meilisearch_core::{Database, MainT, UpdateT, Error as MError, MResult};
use sysinfo::Pid; use sysinfo::Pid;
use crate::option::Opt; use crate::option::Opt;
use crate::routes::index::index_update_callback; use crate::routes::index::index_update_callback;
pub type FreqsMap = HashMap<String, usize>; const LAST_UPDATE_KEY: &str = "last-update";
type SerdeFreqsMap = SerdeBincode<FreqsMap>;
type SerdeDatetime = SerdeBincode<DateTime<Utc>>; type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
#[derive(Clone)] #[derive(Clone)]
@ -32,63 +32,41 @@ impl Deref for Data {
pub struct DataInner { pub struct DataInner {
pub db: Arc<Database>, pub db: Arc<Database>,
pub db_path: String, pub db_path: String,
pub admin_token: Option<String>, pub api_key: Option<String>,
pub server_pid: Pid, pub server_pid: Pid,
} }
impl DataInner { impl DataInner {
pub fn is_indexing(&self, reader: &heed::RoTxn, index: &str) -> MResult<Option<bool>> { pub fn is_indexing(&self, reader: &heed::RoTxn<UpdateT>, index: &str) -> MResult<Option<bool>> {
match self.db.open_index(&index) { match self.db.open_index(&index) {
Some(index) => index.current_update_id(&reader).map(|u| Some(u.is_some())), Some(index) => index.current_update_id(&reader).map(|u| Some(u.is_some())),
None => Ok(None), None => Ok(None),
} }
} }
pub fn last_update( pub fn last_update(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
&self,
reader: &heed::RoTxn,
index_name: &str,
) -> MResult<Option<DateTime<Utc>>> {
let key = format!("last-update-{}", index_name);
match self match self
.db .db
.common_store() .common_store()
.get::<Str, SerdeDatetime>(&reader, &key)? .get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)?
{ {
Some(datetime) => Ok(Some(datetime)), Some(datetime) => Ok(Some(datetime)),
None => Ok(None), None => Ok(None),
} }
} }
pub fn set_last_update(&self, writer: &mut heed::RwTxn, index_name: &str) -> MResult<()> { pub fn set_last_update(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
let key = format!("last-update-{}", index_name);
self.db self.db
.common_store() .common_store()
.put::<Str, SerdeDatetime>(writer, &key, &Utc::now()) .put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now())
.map_err(Into::into) .map_err(Into::into)
} }
pub fn fields_frequency( pub fn compute_stats(&self, writer: &mut heed::RwTxn<MainT>, index_uid: &str) -> MResult<()> {
&self, let index = match self.db.open_index(&index_uid) {
reader: &heed::RoTxn,
index_name: &str,
) -> MResult<Option<FreqsMap>> {
let key = format!("fields-frequency-{}", index_name);
match self
.db
.common_store()
.get::<Str, SerdeFreqsMap>(&reader, &key)?
{
Some(freqs) => Ok(Some(freqs)),
None => Ok(None),
}
}
pub fn compute_stats(&self, writer: &mut heed::RwTxn, index_name: &str) -> MResult<()> {
let index = match self.db.open_index(&index_name) {
Some(index) => index, Some(index) => index,
None => { None => {
error!("Impossible to retrieve index {}", index_name); error!("Impossible to retrieve index {}", index_uid);
return Ok(()); return Ok(());
} }
}; };
@ -115,27 +93,25 @@ impl DataInner {
.map(|(a, c)| (schema.attribute_name(a).to_owned(), c)) .map(|(a, c)| (schema.attribute_name(a).to_owned(), c))
.collect(); .collect();
let key = format!("fields-frequency-{}", index_name); index
self.db .main
.common_store() .put_fields_frequency(writer, &frequency)
.put::<Str, SerdeFreqsMap>(writer, &key, &frequency)?; .map_err(MError::Zlmdb)
Ok(())
} }
} }
impl Data { impl Data {
pub fn new(opt: Opt) -> Data { pub fn new(opt: Opt) -> Data {
let db_path = opt.database_path.clone(); let db_path = opt.db_path.clone();
let admin_token = opt.admin_token.clone(); let api_key = opt.api_key.clone();
let server_pid = sysinfo::get_current_pid().unwrap(); let server_pid = sysinfo::get_current_pid().unwrap();
let db = Arc::new(Database::open_or_create(opt.database_path.clone()).unwrap()); let db = Arc::new(Database::open_or_create(opt.db_path.clone()).unwrap());
let inner_data = DataInner { let inner_data = DataInner {
db: db.clone(), db: db.clone(),
db_path, db_path,
admin_token, api_key,
server_pid, server_pid,
}; };
@ -144,8 +120,8 @@ impl Data {
}; };
let callback_context = data.clone(); let callback_context = data.clone();
db.set_update_callback(Box::new(move |index_name, status| { db.set_update_callback(Box::new(move |index_uid, status| {
index_update_callback(&index_name, &callback_context, status); index_update_callback(&index_uid, &callback_context, status);
})); }));
data data

View File

@ -1,10 +1,11 @@
use crate::routes::setting::{RankingOrdering, SettingBody}; use crate::routes::setting::{RankingOrdering, SettingBody};
use indexmap::IndexMap; use indexmap::IndexMap;
use log::*; use log::error;
use meilidb_core::criterion::*; use meilisearch_core::criterion::*;
use meilidb_core::Highlight; use meilisearch_core::Highlight;
use meilidb_core::{Index, RankedMap}; use meilisearch_core::{Index, RankedMap};
use meilidb_schema::{Schema, SchemaAttr}; use meilisearch_core::MainT;
use meilisearch_schema::{Schema, SchemaAttr};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use std::cmp::Ordering; use std::cmp::Ordering;
@ -57,8 +58,8 @@ impl fmt::Display for Error {
} }
} }
impl From<meilidb_core::Error> for Error { impl From<meilisearch_core::Error> for Error {
fn from(error: meilidb_core::Error) -> Self { fn from(error: meilisearch_core::Error) -> Self {
Error::Internal(error.to_string()) Error::Internal(error.to_string())
} }
} }
@ -157,7 +158,7 @@ impl<'a> SearchBuilder<'a> {
self self
} }
pub fn search(&self, reader: &heed::RoTxn) -> Result<SearchResult, Error> { pub fn search(&self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, Error> {
let schema = self.index.main.schema(reader); let schema = self.index.main.schema(reader);
let schema = schema.map_err(|e| Error::Internal(e.to_string()))?; let schema = schema.map_err(|e| Error::Internal(e.to_string()))?;
let schema = match schema { let schema = match schema {
@ -285,7 +286,7 @@ impl<'a> SearchBuilder<'a> {
pub fn get_criteria( pub fn get_criteria(
&self, &self,
reader: &heed::RoTxn, reader: &heed::RoTxn<MainT>,
ranked_map: &'a RankedMap, ranked_map: &'a RankedMap,
schema: &Schema, schema: &Schema,
) -> Result<Option<Criteria<'a>>, Error> { ) -> Result<Option<Criteria<'a>>, Error> {

View File

@ -0,0 +1,2 @@
pub mod meilisearch;
pub mod tide;

View File

@ -3,7 +3,7 @@ use crate::models::token::*;
use crate::Data; use crate::Data;
use chrono::Utc; use chrono::Utc;
use heed::types::{SerdeBincode, Str}; use heed::types::{SerdeBincode, Str};
use meilidb_core::Index; use meilisearch_core::Index;
use serde_json::Value; use serde_json::Value;
use tide::Context; use tide::Context;
@ -17,30 +17,29 @@ pub trait ContextExt {
impl ContextExt for Context<Data> { impl ContextExt for Context<Data> {
fn is_allowed(&self, acl: ACL) -> SResult<()> { fn is_allowed(&self, acl: ACL) -> SResult<()> {
let admin_token = match &self.state().admin_token { let api_key = match &self.state().api_key {
Some(admin_token) => admin_token, Some(api_key) => api_key,
None => return Ok(()), None => return Ok(()),
}; };
let user_api_key = self.header("X-Meili-API-Key")?; let user_api_key = self.header("X-Meili-API-Key")?;
if user_api_key == *admin_token { if user_api_key == *api_key {
return Ok(()); return Ok(());
} }
let request_index: Option<String> = None; //self.param::<String>("index").ok(); let request_index: Option<String> = None; //self.param::<String>("index").ok();
let db = &self.state().db; let db = &self.state().db;
let env = &db.env; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, user_api_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, user_api_key);
let token_config = db let token_config = db
.common_store() .common_store()
.get::<Str, SerdeBincode<Token>>(&reader, &token_key) .get::<_, Str, SerdeBincode<Token>>(&reader, &token_key)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found(format!( .ok_or(ResponseError::invalid_token(format!(
"token key: {}", "Api key does not exist: {}",
token_key user_api_key
)))?; )))?;
if token_config.revoked { if token_config.revoked {
@ -93,12 +92,12 @@ impl ContextExt for Context<Data> {
} }
fn index(&self) -> Result<Index, ResponseError> { fn index(&self) -> Result<Index, ResponseError> {
let index_name = self.url_param("index")?; let index_uid = self.url_param("index")?;
let index = self let index = self
.state() .state()
.db .db
.open_index(&index_name) .open_index(&index_uid)
.ok_or(ResponseError::index_not_found(index_name))?; .ok_or(ResponseError::index_not_found(index_uid))?;
Ok(index) Ok(index)
} }
@ -106,7 +105,7 @@ impl ContextExt for Context<Data> {
let name = self let name = self
.param::<Value>("identifier") .param::<Value>("identifier")
.as_ref() .as_ref()
.map(meilidb_core::serde::value_to_string) .map(meilisearch_core::serde::value_to_string)
.map_err(|e| ResponseError::bad_parameter("identifier", e))? .map_err(|e| ResponseError::bad_parameter("identifier", e))?
.ok_or(ResponseError::bad_parameter( .ok_or(ResponseError::bad_parameter(
"identifier", "identifier",

View File

@ -1,6 +1,3 @@
#[macro_use]
extern crate envconfig_derive;
pub mod data; pub mod data;
pub mod error; pub mod error;
pub mod helpers; pub mod helpers;

View File

@ -1,24 +1,34 @@
use std::env::VarError::NotPresent;
use std::{env, thread};
use http::header::HeaderValue; use http::header::HeaderValue;
use log::info; use log::info;
use main_error::MainError; use main_error::MainError;
use structopt::StructOpt;
use tide::middleware::{CorsMiddleware, CorsOrigin}; use tide::middleware::{CorsMiddleware, CorsOrigin};
use tide_log::RequestLogger; use tide_log::RequestLogger;
use meilidb_http::data::Data; use meilisearch_http::data::Data;
use meilidb_http::option::Opt; use meilisearch_http::option::Opt;
use meilidb_http::routes; use meilisearch_http::routes;
use meilidb_http::routes::index::index_update_callback; use meilisearch_http::routes::index::index_update_callback;
#[cfg(not(target_os = "macos"))] mod analytics;
#[cfg(target_os = "linux")]
#[global_allocator] #[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
pub fn main() -> Result<(), MainError> { pub fn main() -> Result<(), MainError> {
env_logger::init(); env_logger::init();
let opt = Opt::new(); let opt = Opt::from_args();
let data = Data::new(opt.clone()); let data = Data::new(opt.clone());
if env::var("MEILI_NO_ANALYTICS") == Err(NotPresent) {
thread::spawn(|| analytics::analytics_sender());
}
let data_cloned = data.clone(); let data_cloned = data.clone();
data.db.set_update_callback(Box::new(move |name, status| { data.db.set_update_callback(Box::new(move |name, status| {
index_update_callback(name, &data_cloned, status); index_update_callback(name, &data_cloned, status);
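One subtlety in the analytics gate above: `env::var("MEILI_NO_ANALYTICS") == Err(NotPresent)` is true only when the variable is entirely absent, so setting it to any value, even an empty string, opts out. The `--no-analytics` flag parsed in option.rs (below) is a separate field and, at least in the hunks shown here, is not consulted by this check. A quick demonstration of the env-var semantics:

use std::env;
use std::env::VarError::NotPresent;

fn main() {
    env::remove_var("MEILI_NO_ANALYTICS");
    assert_eq!(env::var("MEILI_NO_ANALYTICS"), Err(NotPresent)); // sender runs

    env::set_var("MEILI_NO_ANALYTICS", "");
    assert!(env::var("MEILI_NO_ANALYTICS") != Err(NotPresent)); // opted out
}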

View File

@ -1,7 +1,7 @@
use std::collections::HashSet; use std::collections::HashSet;
use indexmap::IndexMap; use indexmap::IndexMap;
use meilidb_schema::{Schema, SchemaBuilder, SchemaProps}; use meilisearch_schema::{Schema, SchemaBuilder, SchemaProps};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]

View File

@ -0,0 +1,20 @@
use structopt::StructOpt;
#[derive(Debug, Clone, StructOpt)]
pub struct Opt {
/// The destination where the database must be created.
#[structopt(long, env = "MEILI_DB_PATH", default_value = "/tmp/meilisearch")]
pub db_path: String,
/// The address on which the http server will listen.
#[structopt(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:8080")]
pub http_addr: String,
/// The master key allowing you to do everything on the server.
#[structopt(long, env = "MEILI_API_KEY")]
pub api_key: Option<String>,
/// Do not send analytics to Meili.
#[structopt(long, env = "MEILI_NO_ANALYTICS")]
pub no_analytics: bool,
}
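This new option.rs replaces the envconfig-based configuration: with StructOpt, every field doubles as a command-line flag and an environment variable, which is why `Opt::new()` became `Opt::from_args()` in the main.rs hunk above. A reduced sketch of the same pattern (two of the four fields), showing both spellings:

use structopt::StructOpt;

/// A reduced copy of the struct above, enough to show the duality:
/// each field is reachable both as a flag and as an env variable.
#[derive(Debug, StructOpt)]
struct Opt {
    /// e.g. `--db-path /data` or `MEILI_DB_PATH=/data`
    #[structopt(long, env = "MEILI_DB_PATH", default_value = "/tmp/meilisearch")]
    db_path: String,

    /// e.g. `--http-addr 0.0.0.0:8080` or `MEILI_HTTP_ADDR=0.0.0.0:8080`
    #[structopt(long, env = "MEILI_HTTP_ADDR", default_value = "127.0.0.1:8080")]
    http_addr: String,
}

fn main() {
    // The command line wins when both a flag and its variable are set.
    let opt = Opt::from_args();
    println!("db at {}, listening on {}", opt.db_path, opt.http_addr);
}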

View File

@ -19,10 +19,10 @@ pub async fn get_document(ctx: Context<Data>) -> SResult<Response> {
let index = ctx.index()?; let index = ctx.index()?;
let identifier = ctx.identifier()?; let identifier = ctx.identifier()?;
let document_id = meilidb_core::serde::compute_document_id(identifier.clone()); let document_id = meilisearch_core::serde::compute_document_id(identifier.clone());
let env = &ctx.state().db.env; let db = &ctx.state().db;
let reader = env.read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let response = index let response = index
.document::<IndexMap<String, Value>>(&reader, None, document_id) .document::<IndexMap<String, Value>>(&reader, None, document_id)
@ -47,18 +47,18 @@ pub async fn delete_document(ctx: Context<Data>) -> SResult<Response> {
let index = ctx.index()?; let index = ctx.index()?;
let identifier = ctx.identifier()?; let identifier = ctx.identifier()?;
let document_id = meilidb_core::serde::compute_document_id(identifier.clone()); let document_id = meilisearch_core::serde::compute_document_id(identifier.clone());
let env = &ctx.state().db.env; let db = &ctx.state().db;
let mut writer = env.write_txn().map_err(ResponseError::internal)?; let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?;
let mut documents_deletion = index.documents_deletion(); let mut documents_deletion = index.documents_deletion();
documents_deletion.delete_document_by_id(document_id); documents_deletion.delete_document_by_id(document_id);
let update_id = documents_deletion let update_id = documents_deletion
.finalize(&mut writer) .finalize(&mut update_writer)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; update_writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id }; let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body) Ok(tide::response::json(response_body)
@ -74,7 +74,7 @@ struct BrowseQuery {
attributes_to_retrieve: Option<String>, attributes_to_retrieve: Option<String>,
} }
pub async fn browse_documents(ctx: Context<Data>) -> SResult<Response> { pub async fn get_all_documents(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(DocumentsRead)?; ctx.is_allowed(DocumentsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
@ -83,8 +83,8 @@ pub async fn browse_documents(ctx: Context<Data>) -> SResult<Response> {
let offset = query.offset.unwrap_or(0); let offset = query.offset.unwrap_or(0);
let limit = query.limit.unwrap_or(20); let limit = query.limit.unwrap_or(20);
let env = &ctx.state().db.env; let db = &ctx.state().db;
let reader = env.read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let documents_ids: Result<BTreeSet<_>, _> = let documents_ids: Result<BTreeSet<_>, _> =
match index.documents_fields_counts.documents_ids(&reader) { match index.documents_fields_counts.documents_ids(&reader) {
@ -114,19 +114,11 @@ pub async fn browse_documents(ctx: Context<Data>) -> SResult<Response> {
} }
} }
if response_body.is_empty() { Ok(tide::response::json(response_body))
Ok(tide::response::json(response_body)
.with_status(StatusCode::NO_CONTENT)
.into_response())
} else {
Ok(tide::response::json(response_body)
.with_status(StatusCode::OK)
.into_response())
}
} }
fn infered_schema(document: &IndexMap<String, Value>) -> Option<meilidb_schema::Schema> { fn infered_schema(document: &IndexMap<String, Value>) -> Option<meilisearch_schema::Schema> {
use meilidb_schema::{SchemaBuilder, DISPLAYED, INDEXED}; use meilisearch_schema::{SchemaBuilder, DISPLAYED, INDEXED};
let mut identifier = None; let mut identifier = None;
for key in document.keys() { for key in document.keys() {
@ -154,18 +146,19 @@ async fn update_multiple_documents(mut ctx: Context<Data>, is_partial: bool) ->
ctx.body_json().await.map_err(ResponseError::bad_request)?; ctx.body_json().await.map_err(ResponseError::bad_request)?;
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let mut writer = env.write_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?;
let current_schema = index let current_schema = index
.main .main
.schema(&writer) .schema(&reader)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
if current_schema.is_none() { if current_schema.is_none() {
match data.first().and_then(infered_schema) { match data.first().and_then(infered_schema) {
Some(schema) => { Some(schema) => {
index index
.schema_update(&mut writer, schema) .schema_update(&mut update_writer, schema)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
} }
None => return Err(ResponseError::bad_request("Could not infer a schema")), None => return Err(ResponseError::bad_request("Could not infer a schema")),
@ -183,10 +176,10 @@ async fn update_multiple_documents(mut ctx: Context<Data>, is_partial: bool) ->
} }
let update_id = document_addition let update_id = document_addition
.finalize(&mut writer) .finalize(&mut update_writer)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; update_writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id }; let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body) Ok(tide::response::json(response_body)
@ -208,15 +201,15 @@ pub async fn delete_multiple_documents(mut ctx: Context<Data>) -> SResult<Respon
let data: Vec<Value> = ctx.body_json().await.map_err(ResponseError::bad_request)?; let data: Vec<Value> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let mut writer = env.write_txn().map_err(ResponseError::internal)?; let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let mut documents_deletion = index.documents_deletion(); let mut documents_deletion = index.documents_deletion();
for identifier in data { for identifier in data {
if let Some(identifier) = meilidb_core::serde::value_to_string(&identifier) { if let Some(identifier) = meilisearch_core::serde::value_to_string(&identifier) {
documents_deletion documents_deletion
.delete_document_by_id(meilidb_core::serde::compute_document_id(identifier)); .delete_document_by_id(meilisearch_core::serde::compute_document_id(identifier));
} }
} }
@ -237,8 +230,9 @@ pub async fn clear_all_documents(ctx: Context<Data>) -> SResult<Response> {
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let mut writer = env.write_txn().map_err(ResponseError::internal)?; let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let update_id = index let update_id = index
.clear_all(&mut writer) .clear_all(&mut writer)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;

View File

@ -11,12 +11,11 @@ const UNHEALTHY_KEY: &str = "_is_unhealthy";
pub async fn get_health(ctx: Context<Data>) -> SResult<()> { pub async fn get_health(ctx: Context<Data>) -> SResult<()> {
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let common_store = ctx.state().db.common_store(); let common_store = ctx.state().db.common_store();
if let Ok(Some(_)) = common_store.get::<Str, Unit>(&reader, UNHEALTHY_KEY) { if let Ok(Some(_)) = common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY) {
return Err(ResponseError::Maintenance); return Err(ResponseError::Maintenance);
} }
@ -27,11 +26,10 @@ pub async fn set_healthy(ctx: Context<Data>) -> SResult<()> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
let common_store = ctx.state().db.common_store(); let common_store = ctx.state().db.common_store();
match common_store.delete::<Str>(&mut writer, UNHEALTHY_KEY) { match common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY) {
Ok(_) => (), Ok(_) => (),
Err(e) => return Err(ResponseError::internal(e)), Err(e) => return Err(ResponseError::internal(e)),
} }
@ -47,12 +45,11 @@ pub async fn set_unhealthy(ctx: Context<Data>) -> SResult<()> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
let common_store = ctx.state().db.common_store(); let common_store = ctx.state().db.common_store();
if let Err(e) = common_store.put::<Str, Unit>(&mut writer, UNHEALTHY_KEY, &()) { if let Err(e) = common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &()) {
return Err(ResponseError::internal(e)); return Err(ResponseError::internal(e));
} }

View File

@ -0,0 +1,439 @@
use chrono::{DateTime, Utc};
use http::StatusCode;
use log::error;
use meilisearch_core::ProcessedUpdateResult;
use meilisearch_schema::{Schema, SchemaBuilder};
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tide::querystring::ContextExt as QSContextExt;
use tide::response::IntoResponse;
use tide::{Context, Response};
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt;
use crate::models::schema::SchemaBody;
use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::Data;
fn generate_uid() -> String {
let mut rng = rand::thread_rng();
let sample = b"abcdefghijklmnopqrstuvwxyz0123456789";
sample
.choose_multiple(&mut rng, 8)
.map(|c| *c as char)
.collect()
}
pub async fn list_indexes(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let indexes_uids = ctx.state().db.indexes_uids();
let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let mut response_body = Vec::new();
for index_uid in indexes_uids {
let index = ctx.state().db.open_index(&index_uid);
match index {
Some(index) => {
let name = index
.main
.name(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'name' not found"))?;
let created_at = index
.main
.created_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let index_response = IndexResponse {

name,
uid: index_uid,
created_at,
updated_at,
};
response_body.push(index_response);
}
None => error!(
"Index {} is referenced in the indexes list but cannot be found",
index_uid
),
}
}
Ok(tide::response::json(response_body))
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
}
pub async fn get_index(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let uid = ctx.url_param("index")?;
let name = index
.main
.name(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'name' not found"))?;
let created_at = index
.main
.created_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let response_body = IndexResponse {
name,
uid,
created_at,
updated_at,
};
Ok(tide::response::json(response_body))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct IndexCreateRequest {
name: String,
uid: Option<String>,
schema: Option<SchemaBody>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexCreateResponse {
name: String,
uid: String,
schema: Option<SchemaBody>,
#[serde(skip_serializing_if = "Option::is_none")]
update_id: Option<u64>,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
}
pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let body = ctx
.body_json::<IndexCreateRequest>()
.await
.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let uid = match body.uid {
Some(uid) => uid,
None => loop {
let uid = generate_uid();
if db.open_index(&uid).is_none() {
break uid;
}
},
};
let created_index = match db.create_index(&uid) {
Ok(index) => index,
Err(e) => return Err(ResponseError::create_index(e)),
};
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?;
created_index
.main
.put_name(&mut writer, &body.name)
.map_err(ResponseError::internal)?;
created_index
.main
.put_created_at(&mut writer)
.map_err(ResponseError::internal)?;
created_index
.main
.put_updated_at(&mut writer)
.map_err(ResponseError::internal)?;
let schema: Option<Schema> = body.schema.clone().map(Into::into);
let mut response_update_id = None;
if let Some(schema) = schema {
let update_id = created_index
.schema_update(&mut update_writer, schema)
.map_err(ResponseError::internal)?;
response_update_id = Some(update_id)
}
writer.commit().map_err(ResponseError::internal)?;
update_writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexCreateResponse {
name: body.name,
uid,
schema: body.schema,
update_id: response_update_id,
created_at: Utc::now(),
updated_at: Utc::now(),
};
Ok(tide::response::json(response_body)
.with_status(StatusCode::CREATED)
.into_response())
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct UpdateIndexRequest {
name: String,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct UpdateIndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
}
pub async fn update_index(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let body = ctx
.body_json::<UpdateIndexRequest>()
.await
.map_err(ResponseError::bad_request)?;
let index_uid = ctx.url_param("index")?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
index
.main
.put_name(&mut writer, &body.name)
.map_err(ResponseError::internal)?;
index
.main
.put_updated_at(&mut writer)
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let created_at = index
.main
.created_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let response_body = UpdateIndexResponse {
name: body.name,
uid: index_uid,
created_at,
updated_at,
};
Ok(tide::response::json(response_body)
.with_status(StatusCode::ACCEPTED)
.into_response())
}
#[derive(Default, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SchemaParams {
raw: bool,
}
pub async fn get_index_schema(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let index = ctx.index()?;
// Tide doesn't support "no query param"
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let schema = index
.main
.schema(&reader)
.map_err(ResponseError::open_index)?;
match schema {
Some(schema) => {
if params.raw {
Ok(tide::response::json(schema))
} else {
Ok(tide::response::json(SchemaBody::from(schema)))
}
}
None => Err(ResponseError::not_found("missing index schema")),
}
}
pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let index_uid = ctx.url_param("index")?;
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let schema = if params.raw {
ctx.body_json::<SchemaBuilder>()
.await
.map_err(ResponseError::bad_request)?
.build()
} else {
ctx.body_json::<SchemaBody>()
.await
.map_err(ResponseError::bad_request)?
.into()
};
let db = &ctx.state().db;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let index = db
.open_index(&index_uid)
.ok_or(ResponseError::index_not_found(index_uid))?;
let update_id = index
.schema_update(&mut writer, schema.clone())
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body)
.with_status(StatusCode::ACCEPTED)
.into_response())
}
pub async fn get_update_status(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let db = &ctx.state().db;
let reader = db.update_read_txn().map_err(ResponseError::internal)?;
let update_id = ctx
.param::<u64>("update_id")
.map_err(|e| ResponseError::bad_parameter("update_id", e))?;
let index = ctx.index()?;
let status = index
.update_status(&reader, update_id)
.map_err(ResponseError::internal)?;
let response = match status {
Some(status) => tide::response::json(status)
.with_status(StatusCode::OK)
.into_response(),
None => tide::response::json(json!({ "message": "unknown update id" }))
.with_status(StatusCode::NOT_FOUND)
.into_response(),
};
Ok(response)
}
pub async fn get_all_updates_status(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let db = &ctx.state().db;
let reader = db.update_read_txn().map_err(ResponseError::internal)?;
let index = ctx.index()?;
let all_status = index
.all_updates_status(&reader)
.map_err(ResponseError::internal)?;
let response = tide::response::json(all_status)
.with_status(StatusCode::OK)
.into_response();
Ok(response)
}
pub async fn delete_index(ctx: Context<Data>) -> SResult<StatusCode> {
ctx.is_allowed(IndexesWrite)?;
let index_uid = ctx.url_param("index")?;
let found = ctx
.state()
.db
.delete_index(&index_uid)
.map_err(ResponseError::internal)?;
if found {
Ok(StatusCode::NO_CONTENT)
} else {
Ok(StatusCode::NOT_FOUND)
}
}
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
if status.error.is_some() {
return;
}
if let Some(index) = data.db.open_index(&index_uid) {
let db = &data.db;
let mut writer = match db.main_write_txn() {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e)
}
if let Err(e) = data.set_last_update(&mut writer) {
error!("Impossible to update last_update; {}", e)
}
if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e)
}
if let Err(e) = writer.commit() {
error!("Impossible to get write_txn; {}", e);
}
}
}

View File

@ -26,15 +26,14 @@ pub async fn list(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let common_store = db.common_store(); let common_store = db.common_store();
let mut response: Vec<Token> = Vec::new(); let mut response: Vec<Token> = Vec::new();
let iter = common_store let iter = common_store
.prefix_iter::<Str, SerdeBincode<Token>>(&reader, TOKEN_PREFIX_KEY) .prefix_iter::<_, Str, SerdeBincode<Token>>(&reader, TOKEN_PREFIX_KEY)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
for result in iter { for result in iter {
@ -50,14 +49,13 @@ pub async fn get(ctx: Context<Data>) -> SResult<Response> {
let request_key = ctx.url_param("key")?; let request_key = ctx.url_param("key")?;
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key);
let token_config = db let token_config = db
.common_store() .common_store()
.get::<Str, SerdeBincode<Token>>(&reader, &token_key) .get::<_, Str, SerdeBincode<Token>>(&reader, &token_key)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found(format!( .ok_or(ResponseError::not_found(format!(
"token key: {}", "token key: {}",
@ -97,11 +95,10 @@ pub async fn create(mut ctx: Context<Data>) -> SResult<Response> {
}; };
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
db.common_store() db.common_store()
.put::<Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_definition) .put::<_, Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_definition)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; writer.commit().map_err(ResponseError::internal)?;
@ -117,6 +114,8 @@ pub struct UpdatedRequest {
description: Option<String>, description: Option<String>,
acl: Option<Vec<ACL>>, acl: Option<Vec<ACL>>,
indexes: Option<Vec<Wildcard>>, indexes: Option<Vec<Wildcard>>,
expires_at: Option<DateTime<Utc>>,
revoked: Option<bool>,
} }
pub async fn update(mut ctx: Context<Data>) -> SResult<Response> { pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
@ -126,15 +125,14 @@ pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
let data: UpdatedRequest = ctx.body_json().await.map_err(ResponseError::bad_request)?; let data: UpdatedRequest = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
let common_store = db.common_store(); let common_store = db.common_store();
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key);
let mut token_config = common_store let mut token_config = common_store
.get::<Str, SerdeBincode<Token>>(&writer, &token_key) .get::<_, Str, SerdeBincode<Token>>(&writer, &token_key)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found(format!( .ok_or(ResponseError::not_found(format!(
"token key: {}", "token key: {}",
@ -154,16 +152,24 @@ pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
token_config.indexes = indexes; token_config.indexes = indexes;
} }
if let Some(expires_at) = data.expires_at {
token_config.expires_at = expires_at;
}
if let Some(revoked) = data.revoked {
token_config.revoked = revoked;
}
token_config.updated_at = Utc::now(); token_config.updated_at = Utc::now();
common_store common_store
.put::<Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_config) .put::<_, Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_config)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; writer.commit().map_err(ResponseError::internal)?;
Ok(tide::response::json(token_config) Ok(tide::response::json(token_config)
.with_status(StatusCode::ACCEPTED) .with_status(StatusCode::OK)
.into_response()) .into_response())
} }
@ -172,18 +178,17 @@ pub async fn delete(ctx: Context<Data>) -> SResult<StatusCode> {
let request_key = ctx.url_param("key")?; let request_key = ctx.url_param("key")?;
let db = &ctx.state().db; let db = &ctx.state().db;
let env = &db.env; let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut writer = env.write_txn().map_err(ResponseError::internal)?;
let common_store = db.common_store(); let common_store = db.common_store();
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key);
common_store common_store
.delete::<Str>(&mut writer, &token_key) .delete::<_, Str>(&mut writer, &token_key)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; writer.commit().map_err(ResponseError::internal)?;
Ok(StatusCode::ACCEPTED) Ok(StatusCode::NO_CONTENT)
} }
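Besides the status-code corrections (update now answers 200 OK instead of 202 Accepted, delete 204 No Content instead of 202), UpdatedRequest gains optional expires_at and revoked fields, so a token can now be expired or revoked through the update endpoint. A hedged example of a request body the new handler would accept, assuming the snake_case field names shown above and chrono's RFC 3339 parsing (the route the handler is mounted on is not shown in this diff):

use serde_json::json;

fn main() {
    // Omitted fields leave the stored token untouched; only the
    // fields present are applied, mirroring the `if let Some(..)`
    // chain in `update` above.
    let body = json!({
        "description": "search-only key, rotated quarterly",
        "revoked": false,
        "expires_at": "2020-06-01T00:00:00Z"
    });
    println!("{}", body);
}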

View File

@ -13,7 +13,10 @@ pub mod synonym;
pub fn load_routes(app: &mut tide::App<Data>) { pub fn load_routes(app: &mut tide::App<Data>) {
app.at("").nest(|router| { app.at("").nest(|router| {
router.at("/indexes").nest(|router| { router.at("/indexes").nest(|router| {
router.at("/").get(index::list_indexes); router
.at("/")
.get(index::list_indexes)
.post(index::create_index);
router.at("/search").post(search::search_multi_index); router.at("/search").post(search::search_multi_index);
@ -28,15 +31,19 @@ pub fn load_routes(app: &mut tide::App<Data>) {
router router
.at("/") .at("/")
.get(index::get_index_schema) .get(index::get_index)
.post(index::create_index) .put(index::update_index)
.put(index::update_schema)
.delete(index::delete_index); .delete(index::delete_index);
router
.at("/schema")
.get(index::get_index_schema)
.put(index::update_schema);
router.at("/documents").nest(|router| { router.at("/documents").nest(|router| {
router router
.at("/") .at("/")
.get(document::browse_documents) .get(document::get_all_documents)
.post(document::add_or_replace_multiple_documents) .post(document::add_or_replace_multiple_documents)
.put(document::add_or_update_multiple_documents) .put(document::add_or_update_multiple_documents)
.delete(document::clear_all_documents); .delete(document::clear_all_documents);
@ -53,8 +60,12 @@ pub fn load_routes(app: &mut tide::App<Data>) {
.post(document::delete_multiple_documents); .post(document::delete_multiple_documents);
}); });
router.at("/synonym").nest(|router| { router.at("/synonyms").nest(|router| {
router.at("/").get(synonym::list).post(synonym::create); router
.at("/")
.get(synonym::list)
.post(synonym::create)
.delete(synonym::clear);
router router
.at("/:synonym") .at("/:synonym")
@ -63,14 +74,13 @@ pub fn load_routes(app: &mut tide::App<Data>) {
.delete(synonym::delete); .delete(synonym::delete);
router.at("/batch").post(synonym::batch_write); router.at("/batch").post(synonym::batch_write);
router.at("/clear").post(synonym::clear);
}); });
router.at("/stop-words").nest(|router| { router.at("/stop-words").nest(|router| {
router router
.at("/") .at("/")
.get(stop_words::list) .get(stop_words::list)
.put(stop_words::add) .patch(stop_words::add)
.delete(stop_words::delete); .delete(stop_words::delete);
}); });
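Taken together, the routing hunks above reshape the public API: index creation moves to POST on /indexes, index metadata and schema are split between /indexes/:index and /indexes/:index/schema, /synonym becomes /synonyms with DELETE on the root replacing POST /clear, and stop-word additions switch from PUT to PATCH. A hedged client-side sketch using ureq (added as a dependency in this same change set), against an assumed local instance and a hypothetical index uid:

fn main() {
    // Assumes a local instance on the default address from option.rs;
    // the index uid "movies" is hypothetical.
    let base = "http://127.0.0.1:8080";

    // Metadata and schema now live on separate routes.
    let meta = ureq::get(&format!("{}/indexes/movies", base)).call();
    let schema = ureq::get(&format!("{}/indexes/movies/schema", base)).call();

    // The collection was renamed from /synonym to /synonyms.
    let synonyms = ureq::get(&format!("{}/indexes/movies/synonyms", base)).call();

    for response in [meta, schema, synonyms].iter() {
        println!("{}", response.status());
    }
}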

View File

@ -2,14 +2,14 @@ use std::collections::HashMap;
use std::collections::HashSet; use std::collections::HashSet;
use std::time::Duration; use std::time::Duration;
use meilidb_core::Index; use meilisearch_core::Index;
use rayon::iter::{IntoParallelIterator, ParallelIterator}; use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tide::querystring::ContextExt as QSContextExt; use tide::querystring::ContextExt as QSContextExt;
use tide::{Context, Response}; use tide::{Context, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::meilidb::{Error, IndexSearchExt, SearchHit}; use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::ContextExt;
use crate::Data; use crate::Data;
@ -33,8 +33,8 @@ pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> {
// ctx.is_allowed(DocumentsRead)?; // ctx.is_allowed(DocumentsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let reader = env.read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let schema = index let schema = index
.main .main
@ -155,13 +155,8 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
for index in index_list.clone() { for index in index_list.clone() {
if index == "*" { if index == "*" {
index_list = ctx index_list = ctx.state().db.indexes_uids().into_iter().collect();
.state() break;
.db
.indexes_names()
.map_err(ResponseError::internal)?
.into_iter()
.collect();
} }
} }
@ -181,10 +176,10 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
let par_body = body.clone(); let par_body = body.clone();
let responses_per_index: Vec<SResult<_>> = index_list let responses_per_index: Vec<SResult<_>> = index_list
.into_par_iter() .into_par_iter()
.map(move |index_name| { .map(move |index_uid| {
let index: Index = db let index: Index = db
.open_index(&index_name) .open_index(&index_uid)
.ok_or(ResponseError::index_not_found(&index_name))?; .ok_or(ResponseError::index_not_found(&index_uid))?;
let mut search_builder = index.new_search(par_body.query.clone()); let mut search_builder = index.new_search(par_body.query.clone());
@ -207,7 +202,7 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
search_builder.filters(filters); search_builder.filters(filters);
} }
if let Some(timeout_ms) = par_body.timeout_ms { if let Some(timeout_ms) = par_body.timeout_ms {
search_builder.timeout(Duration::from_secs(timeout_ms)); search_builder.timeout(Duration::from_millis(timeout_ms));
} }
if let Some(matches) = par_body.matches { if let Some(matches) = par_body.matches {
if matches { if matches {
@ -215,13 +210,11 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
} }
} }
let env = &db.env; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let response = search_builder let response = search_builder
.search(&reader) .search(&reader)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
Ok((index_name, response)) Ok((index_uid, response))
}) })
.collect(); .collect();
@ -230,11 +223,11 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
let mut max_query_time = 0; let mut max_query_time = 0;
for response in responses_per_index { for response in responses_per_index {
if let Ok((index_name, response)) = response { if let Ok((index_uid, response)) = response {
if response.processing_time_ms > max_query_time { if response.processing_time_ms > max_query_time {
max_query_time = response.processing_time_ms; max_query_time = response.processing_time_ms;
} }
hits_map.insert(index_name, response.hits); hits_map.insert(index_uid, response.hits);
} }
} }
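One line in this hunk is a plain unit bug fix rather than part of the rename: the per-request timeout_ms was being fed to Duration::from_secs, inflating every client-supplied timeout by a factor of 1000. A two-assert illustration:

use std::time::Duration;

fn main() {
    let timeout_ms: u64 = 40;
    assert_eq!(Duration::from_millis(timeout_ms).as_millis(), 40);
    assert_eq!(Duration::from_secs(timeout_ms).as_millis(), 40_000); // the old behaviour
}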

View File

@ -1,4 +1,4 @@
use std::collections::{HashMap, HashSet}; use std::collections::HashMap;
use http::StatusCode; use http::StatusCode;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -14,7 +14,6 @@ use crate::Data;
#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SettingBody { pub struct SettingBody {
pub stop_words: Option<StopWords>,
pub ranking_order: Option<RankingOrder>, pub ranking_order: Option<RankingOrder>,
pub distinct_field: Option<DistinctField>, pub distinct_field: Option<DistinctField>,
pub ranking_rules: Option<RankingRules>, pub ranking_rules: Option<RankingRules>,
@ -27,7 +26,6 @@ pub enum RankingOrdering {
Dsc, Dsc,
} }
pub type StopWords = HashSet<String>;
pub type RankingOrder = Vec<String>; pub type RankingOrder = Vec<String>;
pub type DistinctField = String; pub type DistinctField = String;
pub type RankingRules = HashMap<String, RankingOrdering>; pub type RankingRules = HashMap<String, RankingOrdering>;
@ -36,8 +34,8 @@ pub async fn get(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?; ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let reader = env.read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let settings = match index.main.customs(&reader).unwrap() { let settings = match index.main.customs(&reader).unwrap() {
Some(bytes) => bincode::deserialize(bytes).unwrap(), Some(bytes) => bincode::deserialize(bytes).unwrap(),
@ -54,18 +52,15 @@ pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let mut writer = env.write_txn().map_err(ResponseError::internal)?; let reader = db.main_write_txn().map_err(ResponseError::internal)?;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let mut current_settings = match index.main.customs(&writer).unwrap() { let mut current_settings = match index.main.customs(&reader).unwrap() {
Some(bytes) => bincode::deserialize(bytes).unwrap(), Some(bytes) => bincode::deserialize(bytes).unwrap(),
None => SettingBody::default(), None => SettingBody::default(),
}; };
if let Some(stop_words) = settings.stop_words {
current_settings.stop_words = Some(stop_words);
}
if let Some(ranking_order) = settings.ranking_order { if let Some(ranking_order) = settings.ranking_order {
current_settings.ranking_order = Some(ranking_order); current_settings.ranking_order = Some(ranking_order);
} }

View File

@ -1,6 +1,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use log::error;
use pretty_bytes::converter::convert; use pretty_bytes::converter::convert;
use serde::Serialize; use serde::Serialize;
use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt}; use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt};
@ -17,44 +18,38 @@ use crate::Data;
struct IndexStatsResponse { struct IndexStatsResponse {
number_of_documents: u64, number_of_documents: u64,
is_indexing: bool, is_indexing: bool,
last_update: Option<DateTime<Utc>>,
fields_frequency: HashMap<String, usize>, fields_frequency: HashMap<String, usize>,
} }
pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> { pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let index_name = ctx.url_param("index")?; let index_uid = ctx.url_param("index")?;
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let reader = env.read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let update_reader = db.update_read_txn().map_err(ResponseError::internal)?;
let number_of_documents = index let number_of_documents = index
.main .main
.number_of_documents(&reader) .number_of_documents(&reader)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
let fields_frequency = ctx let fields_frequency = index
.state() .main
.fields_frequency(&reader, &index_name) .fields_frequency(&reader)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.unwrap_or_default(); .unwrap_or_default();
let is_indexing = ctx let is_indexing = ctx
.state() .state()
.is_indexing(&reader, &index_name) .is_indexing(&update_reader, &index_uid)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found("Index not found"))?; .ok_or(ResponseError::internal("'is_indexing' not found"))?;
let last_update = ctx
.state()
.last_update(&reader, &index_name)
.map_err(ResponseError::internal)?;
let response = IndexStatsResponse { let response = IndexStatsResponse {
number_of_documents, number_of_documents,
is_indexing, is_indexing,
last_update,
fields_frequency, fields_frequency,
}; };
Ok(tide::response::json(response)) Ok(tide::response::json(response))
@ -64,50 +59,53 @@ pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct StatsResult { struct StatsResult {
database_size: u64, database_size: u64,
last_update: Option<DateTime<Utc>>,
indexes: HashMap<String, IndexStatsResponse>, indexes: HashMap<String, IndexStatsResponse>,
} }
pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> { pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let mut index_list = HashMap::new(); let mut index_list = HashMap::new();
if let Ok(indexes_set) = ctx.state().db.indexes_names() { let db = &ctx.state().db;
for index_name in indexes_set { let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let db = &ctx.state().db; let update_reader = db.update_read_txn().map_err(ResponseError::internal)?;
let env = &db.env;
let index = db.open_index(&index_name).unwrap(); let indexes_set = ctx.state().db.indexes_uids();
let reader = env.read_txn().map_err(ResponseError::internal)?; for index_uid in indexes_set {
let index = ctx.state().db.open_index(&index_uid);
let number_of_documents = index match index {
.main Some(index) => {
.number_of_documents(&reader) let number_of_documents = index
.map_err(ResponseError::internal)?; .main
.number_of_documents(&reader)
.map_err(ResponseError::internal)?;
let fields_frequency = ctx let fields_frequency = index
.state() .main
.fields_frequency(&reader, &index_name) .fields_frequency(&reader)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.unwrap_or_default(); .unwrap_or_default();
let is_indexing = ctx let is_indexing = ctx
.state() .state()
.is_indexing(&reader, &index_name) .is_indexing(&update_reader, &index_uid)
.map_err(ResponseError::internal)? .map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found("Index not found"))?; .ok_or(ResponseError::internal("'is_indexing' not found"))?;
let last_update = ctx let response = IndexStatsResponse {
.state() number_of_documents,
.last_update(&reader, &index_name) is_indexing,
.map_err(ResponseError::internal)?; fields_frequency,
};
let response = IndexStatsResponse { index_list.insert(index_uid, response);
number_of_documents, }
is_indexing, None => error!(
last_update, "Index {:?} is referenced in the indexes list but cannot be found",
fields_frequency, index_uid
}; ),
index_list.insert(index_name, response);
} }
} }
@ -118,8 +116,14 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
.filter(|metadata| metadata.is_file()) .filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len()); .fold(0, |acc, m| acc + m.len());
let last_update = ctx
.state()
.last_update(&reader)
.map_err(ResponseError::internal)?;
let response = StatsResult { let response = StatsResult {
database_size, database_size,
last_update,
indexes: index_list, indexes: index_list,
}; };

View File

@ -12,8 +12,8 @@ pub async fn list(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?; ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let env = &ctx.state().db.env; let db = &ctx.state().db;
let reader = env.read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let stop_words_fst = index let stop_words_fst = index
.main .main
@@ -35,8 +35,8 @@ pub async fn add(mut ctx: Context<Data>) -> SResult<Response> {
     let data: Vec<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let mut stop_words_addition = index.stop_words_addition();
     for stop_word in data {
@@ -61,8 +61,8 @@ pub async fn delete(mut ctx: Context<Data>) -> SResult<Response> {
     let data: Vec<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let mut stop_words_deletion = index.stop_words_deletion();
     for stop_word in data {
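Both mutating stop-word endpoints now take their writer from the update store via `db.update_write_txn()`, while the read-only `list` endpoint opens `db.main_read_txn()` — the same main/update split applied in the stats handler above and in the synonyms handlers below.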

View File

@@ -31,8 +31,8 @@ pub async fn list(ctx: Context<Data>) -> SResult<Response> {
     ctx.is_allowed(SettingsRead)?;
     let index = ctx.index()?;
 
-    let env = &ctx.state().db.env;
-    let reader = env.read_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
 
     let synonyms_fst = index
         .main
@@ -65,8 +65,8 @@ pub async fn get(ctx: Context<Data>) -> SResult<Response> {
     let synonym = ctx.url_param("synonym")?;
     let index = ctx.index()?;
 
-    let env = &ctx.state().db.env;
-    let reader = env.read_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
 
     let synonym_list = index
         .synonyms
@@ -87,8 +87,8 @@ pub async fn create(mut ctx: Context<Data>) -> SResult<Response> {
     let index = ctx.index()?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let mut synonyms_addition = index.synonyms_addition();
@@ -115,7 +115,7 @@ pub async fn create(mut ctx: Context<Data>) -> SResult<Response> {
     let response_body = IndexUpdateResponse { update_id };
     Ok(tide::response::json(response_body)
-        .with_status(StatusCode::CREATED)
+        .with_status(StatusCode::ACCEPTED)
         .into_response())
 }
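The switch from `StatusCode::CREATED` to `StatusCode::ACCEPTED` is a semantic fix: the handler returns an `update_id` for an enqueued update that the update loop applies asynchronously, so 202 Accepted describes the outcome more accurately than 201 Created, which would imply the resource already exists.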
@@ -125,8 +125,8 @@ pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
     let index = ctx.index()?;
     let data: Vec<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let mut synonyms_addition = index.synonyms_addition();
     synonyms_addition.add_synonym(synonym.clone(), data.clone().into_iter());
@@ -147,8 +147,8 @@ pub async fn delete(ctx: Context<Data>) -> SResult<Response> {
     let synonym = ctx.url_param("synonym")?;
     let index = ctx.index()?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let mut synonyms_deletion = index.synonyms_deletion();
     synonyms_deletion.delete_all_alternatives_of(synonym);
@@ -171,8 +171,8 @@ pub async fn batch_write(mut ctx: Context<Data>) -> SResult<Response> {
     let index = ctx.index()?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let mut synonyms_addition = index.synonyms_addition();
     for raw in data {
@@ -207,12 +207,13 @@ pub async fn clear(ctx: Context<Data>) -> SResult<Response> {
     ctx.is_allowed(SettingsWrite)?;
     let index = ctx.index()?;
 
-    let env = &ctx.state().db.env;
-    let mut writer = env.write_txn().map_err(ResponseError::internal)?;
+    let db = &ctx.state().db;
+    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
+    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
 
     let synonyms_fst = index
         .main
-        .synonyms_fst(&writer)
+        .synonyms_fst(&reader)
         .map_err(ResponseError::internal)?;
 
     let synonyms_fst = synonyms_fst.unwrap_or_default();
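The `clear` hunk shows the split most clearly: the current synonyms FST is read through a `main_read_txn` while the deletion itself is staged through an `update_write_txn`. A sketch of that pattern — `Db`, `MainReader`, and `UpdateWriter` are hypothetical stand-ins for illustration, not the real meilisearch-core types:

// Stand-in types; each would wrap a transaction on its own LMDB environment.
struct MainReader;
struct UpdateWriter;

impl UpdateWriter {
    fn commit(self) -> Result<(), &'static str> {
        Ok(()) // would make the queued update visible to the update loop
    }
}

struct Db;

impl Db {
    fn main_read_txn(&self) -> Result<MainReader, &'static str> {
        Ok(MainReader) // read-only view of indexed data and settings
    }
    fn update_write_txn(&self) -> Result<UpdateWriter, &'static str> {
        Ok(UpdateWriter) // write access to the pending-updates queue
    }
}

fn clear_synonyms(db: &Db) -> Result<(), &'static str> {
    let _reader = db.main_read_txn()?; // read the current synonyms FST here
    let writer = db.update_write_txn()?; // enqueue the deletion here
    writer.commit() // nothing is applied synchronously; the update loop does it
}

fn main() {
    let db = Db;
    clear_synonyms(&db).expect("sketch only");
}

One practical payoff of the split: LMDB allows only one writer per environment, so keeping readers on the main store and writers on the update store means stats and settings reads do not contend with the update loop's write lock.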

View File

@@ -1,6 +1,6 @@
 [package]
-name = "meilidb-schema"
-version = "0.6.0"
+name = "meilisearch-schema"
+version = "0.8.2"
 authors = ["Kerollmops <renault.cle@gmail.com>"]
 edition = "2018"

View File

@@ -1,6 +1,6 @@
 [package]
-name = "meilidb-tokenizer"
-version = "0.6.1"
+name = "meilisearch-tokenizer"
+version = "0.8.2"
 authors = ["Kerollmops <renault.cle@gmail.com>"]
 edition = "2018"

Some files were not shown because too many files have changed in this diff.