Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-07-19 21:10:34 +00:00
Compare commits
132 Commits
SHA1
---
f045e111ea
87a76c2a60
4edaebab90
b43137b508
118c673eaf
a9a2d3bca3
4a9e56aa4f
14bb9505eb
d937aeac0a
dd540d2540
4ecaf99047
445a6c9ea2
67b7d60cb0
94b3e8e56e
89b5ae63fc
2a79dc9ded
5ed62dbf76
cb267b68ed
6539be6c46
a23bdb31a3
9014290875
1903302a74
75c3cb4bb6
bfd0f806f8
afab8a7846
afacdbc7a0
18a50b4dac
fb69769991
750e7382c6
2464cc7a6d
f078cbac4d
aa545e5386
9711100ff1
8c49ee1b3b
44cb7f68f9
25dc2ad66f
624bd56459
7a6615cfa7
bcad3ffd7c
98d87fa1ff
7e00bf4bfa
476aecf86d
c39b358518
bd5d25429b
982fb7b786
7dc628965c
d114250ebb
8eec3bcdc2
0583cd8e5d
83b6fc48e1
4b5437a882
de4caef468
36b763b84e
c06dd35af1
51b7cb2722
7f5fb50307
4262561596
8471796987
2775aeb6ac
a747e79e5d
5773c5c865
51d7c84e73
6f0b6933e6
f5a936614a
308630c094
f54397e0cf
754efe1f42
05c30c879f
99e8d4adae
ac63f1cd7a
169749396b
a0637c2c6d
edbba64711
9ba711dfe5
6bce83dde8
629a658c75
2f6c55ef78
a6457718f2
3bf23a7c59
bbe3a10107
37ee0f36c1
e92f544fd1
d7b49fa671
41707e3245
3c51e9f5ed
7d3e937134
6445eea946
ced6cc0e23
944a3943e5
d419f151a0
b2124822a3
f60b912f12
e1f956ce18
ab16e2eff1
3da607749f
a626e5e935
3d73a4895e
979b01a1c0
38cf489acf
60264763f4
d55124e524
643933c3b0
44fd9384bd
75d0d2df6c
92d9283d1a
9b46887f75
ad267cbe59
029772e11f
2ef888d100
4e1e41994c
0545424781
69af8e9e3d
9c7abebde4
e240591128
0bceaa5669
3423c0b246
0953d99198
7ad835baf5
8309e00ed3
4f6a6b1359
21253a2bcb
8e9296c66f
641d12fb2d
2019db972d
0d2f5d3fe0
21567eeb8f
b1272d05b4
feb12a581e
ce7a9073e1
4ae2097cdc
1f2ab71bb6
9c0956049a
.github/ISSUE_TEMPLATE/tracking-issue.md (vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
---
name: Tracking issue
about: Template for a tracking issue
title: ''
labels: tracking-issue
assignees: ''

---

# Summary

One paragraph to explain the feature.

# Motivations

Why are we doing this? What use cases does it support? What is the expected outcome?

# Explanation

Explain the proposal as if it were the final documentation of this proposal.

- What is changing for end-users.
- How it works.
- What is breaking?
- Examples.

# Implementation

Explain the technical specificities that will need to be known or done in order to implement this proposal.

## Steps

Describe each step to create the feature with its associated issue/PR.

# Related

- [ ] Validated by the team (@people needed)
- [ ] Test added
- [ ] [Documentation](https://github.com/meilisearch/documentation/issues/#xxx) //Change xxx or remove the line
- [ ] [SDK/Integrations](https://github.com/meilisearch/integration-guides/issues/#xxx) //Change xxx or remove the line
.github/workflows/publish-binaries.yml (vendored, 9 lines changed)
@@ -1,9 +1,8 @@
+name: Publish binaries to GitHub release
+
 on:
-  push:
-    tags:
-      - '*'
+  release:
+    types: [published]

-name: Publish binaries to release
-
 jobs:
   publish:
.github/workflows/publish-deb-brew-pkg.yml (vendored, 2 lines changed)
@@ -2,7 +2,7 @@ name: Publish deb pkg to GitHub release & APT repository & Homebrew

 on:
   release:
-    types: [published]
+    types: [released]

 jobs:
   debian:
.github/workflows/publish-docker-latest.yml (vendored, 2 lines changed)
@@ -1,7 +1,7 @@
 ---
 on:
   release:
-    types: [published]
+    types: [released]

 name: Publish latest image to Docker Hub
.github/workflows/test.yml (vendored, 65 lines changed)
@@ -1,5 +1,12 @@
 ---
-on: [pull_request]
+on:
+  push:
+    branches:
+      - release-v*
+      - trying
+      - staging
+    tags:
+      - 'v[0-9]+.[0-9]+.[0-9]+' # this only concerns tags on stable

 name: Test binaries with cargo test

@@ -10,7 +17,6 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
-
     steps:
     - uses: actions/checkout@v1
     - uses: actions-rs/toolchain@v1

@@ -18,11 +24,17 @@ jobs:
         profile: minimal
         toolchain: stable
         override: true
+        components: clippy
    - name: Run cargo test
      uses: actions-rs/cargo@v1
      with:
        command: test
        args: --locked --release
+    - name: Run cargo clippy
+      uses: actions-rs/cargo@v1
+      with:
+        command: clippy

  build-image:
    name: Test the build of Docker image
    runs-on: ubuntu-latest

@@ -30,3 +42,52 @@ jobs:
     - uses: actions/checkout@v1
     - run: docker build . --file Dockerfile -t meilisearch
       name: Docker build
+
+  ## A push occurred on a release branch, a prerelease is created and assets are generated
+  prerelease:
+    name: create prerelease
+    needs: [check, build-image]
+    if: ${{ contains(github.ref, 'release-') && github.event_name == 'push' }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+      - name: Get version number
+        id: version-number
+        run: echo "##[set-output name=number;]$(echo ${{ github.ref }} | sed 's/.*\(v.*\)/\1/')"
+      - name: Get commit count
+        id: commit-count
+        run: echo "##[set-output name=count;]$(git rev-list remotes/origin/master..remotes/origin/release-${{ steps.version-number.outputs.number }} --count)"
+      - name: Create Release
+        id: create_release
+        uses: actions/create-release@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.PUBLISH_TOKEN }} # Personal Access Token
+        with:
+          tag_name: ${{ steps.version-number.outputs.number }}rc${{ steps.commit-count.outputs.count }}
+          release_name: Pre-release ${{ steps.version-number.outputs.number }}-rc${{ steps.commit-count.outputs.count }}
+          prerelease: true
+
+  ## If a tag is pushed, a release is created for this tag, and assets will be generated
+  release:
+    name: create release
+    needs: [check, build-image]
+    if: ${{ contains(github.ref, 'tags/v') }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Get version number
+        id: version-number
+        run: echo "##[set-output name=number;]$(echo ${{ github.ref }} | sed 's/.*\(v.*\)/\1/')"
+      - name: Create Release
+        id: create_release
+        uses: actions/create-release@v1
+        env:
+          GITHUB_TOKEN: ${{ secrets.PUBLISH_TOKEN }} # PAT
+        with:
+          tag_name: ${{ steps.version-number.outputs.number }}
+          release_name: Meilisearch ${{ steps.version-number.outputs.number }}
+          prerelease: false
CHANGELOG.md (29 lines changed)
@@ -1,3 +1,32 @@
+## v0.14.1
+
+- Fix version mismatch in snapshot importation (#959)
+
+## v0.14.0
+
+- Fix facet distribution case (#797)
+- Snapshotting (#839)
+- Fix bucket-sort unwrap bug (#915)
+
+## v0.13.0
+
+- Placeholder search (#771)
+- Add database version mismatch check (#794)
+- Displayed and searchable attributes wildcard (#846)
+- Remove sys-info route (#810)
+- Check database version mismatch (#794)
+- Fix unique docid bug (#841)
+- Error codes in updates (#792)
+- Sentry disable argument (#813)
+- Log analytics if enabled (#825)
+- Fix default values displayed on web interface (#874)
+
+## v0.12.0
+
+- Fix long documents not being indexed completely bug (#816)
+- Fix distinct attribute returning id instead of name (#800)
+- Error code rename (#805)
+
 ## v0.11.1

 - Fix facet cache on document update (#789)
Cargo.lock (generated, 105 lines changed)
@@ -301,10 +301,10 @@ dependencies = [
 ]

 [[package]]
-name = "adler32"
-version = "1.0.4"
+name = "adler"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2"
+checksum = "ccc9a9dd069569f212bc4330af9f17c4afb5e8ce185e83dbb14f1349dda18b10"

 [[package]]
 name = "ahash"

@@ -804,12 +804,6 @@ dependencies = [
  "generic-array",
 ]

-[[package]]
-name = "doc-comment"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
-
 [[package]]
 name = "dtoa"
 version = "0.4.5"

@@ -895,10 +889,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"

 [[package]]
-name = "flate2"
-version = "1.0.14"
+name = "filetime"
+version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cfff41391129e0a856d6d822600b8d71179d46879e310417eb9c762eb178b42"
+checksum = "affc17579b132fc2461adf7c575cc6e8b134ebca52c51f5411388965227dc695"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "winapi 0.3.8",
+]
+
+[[package]]
+name = "flate2"
+version = "1.0.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68c90b0fc46cf89d227cc78b40e494ff81287a92dd07631e5af0d06fe3cf885e"
 dependencies = [
  "cfg-if",
  "crc32fast",

@@ -1059,15 +1065,6 @@ dependencies = [
  "typenum",
 ]

-[[package]]
-name = "getopts"
-version = "0.2.21"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
-dependencies = [
- "unicode-width",
-]
-
 [[package]]
 name = "getrandom"
 version = "0.1.14"

@@ -1496,7 +1493,7 @@ checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"

 [[package]]
 name = "meilisearch-core"
-version = "0.11.1"
+version = "0.14.1"
 dependencies = [
  "arc-swap",
  "assert_matches",

@@ -1543,14 +1540,14 @@ dependencies = [

 [[package]]
 name = "meilisearch-error"
-version = "0.11.1"
+version = "0.14.1"
 dependencies = [
  "actix-http",
 ]

 [[package]]
 name = "meilisearch-http"
-version = "0.11.1"
+version = "0.14.1"
 dependencies = [
  "actix-cors",
  "actix-http",

@@ -1563,6 +1560,7 @@ dependencies = [
  "chrono",
  "crossbeam-channel",
  "env_logger",
+ "flate2",
  "futures",
  "http 0.1.21",
  "indexmap",

@@ -1574,7 +1572,6 @@ dependencies = [
  "meilisearch-schema",
  "meilisearch-tokenizer",
  "mime",
- "pretty-bytes",
  "rand 0.7.3",
  "regex",
  "rustls 0.16.0",

@@ -1587,8 +1584,9 @@ dependencies = [
  "siphasher",
  "slice-group-by",
  "structopt",
- "sysinfo",
+ "tar",
+ "tempdir",
  "tempfile",
  "tokio",
  "ureq",
  "vergen",

@@ -1598,7 +1596,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-schema"
-version = "0.11.1"
+version = "0.14.1"
 dependencies = [
  "indexmap",
  "meilisearch-error",

@@ -1609,7 +1607,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-tokenizer"
-version = "0.11.1"
+version = "0.14.1"
 dependencies = [
  "deunicode",
  "slice-group-by",

@@ -1617,7 +1615,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-types"
-version = "0.11.1"
+version = "0.14.1"
 dependencies = [
  "serde",
  "zerocopy",

@@ -1656,11 +1654,11 @@ dependencies = [

 [[package]]
 name = "miniz_oxide"
-version = "0.3.6"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa679ff6578b1cddee93d7e82e263b94a575e0bfced07284eb0c037c1d2416a5"
+checksum = "be0f75932c1f6cfae3c04000e40114adf955636e19040f9c0a2c380702aa1c7f"
 dependencies = [
- "adler32",
+ "adler",
 ]

 [[package]]

@@ -1729,15 +1727,6 @@ dependencies = [
  "void",
 ]

-[[package]]
-name = "ntapi"
-version = "0.3.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a31937dea023539c72ddae0e3571deadc1414b300483fa7aaec176168cfa9d2"
-dependencies = [
- "winapi 0.3.8",
-]
-
 [[package]]
 name = "num-integer"
 version = "0.1.42"

@@ -1954,16 +1943,6 @@ version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "237a5ed80e274dbc66f86bd59c1e25edc039660be53194b5fe0a482e0f2612ea"

-[[package]]
-name = "pretty-bytes"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "009d6edd2c1dbf2e1c0cd48a2f7766e03498d49ada7109a01c6911815c685316"
-dependencies = [
- "atty",
- "getopts",
-]
-
 [[package]]
 name = "proc-macro-error"
 version = "1.0.2"

@@ -2622,18 +2601,15 @@ dependencies = [
 ]

 [[package]]
-name = "sysinfo"
-version = "0.14.5"
+name = "tar"
+version = "0.4.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b796215da5a4b2a1a5db53ee55866c13b74a89acd259ab762eb10e28e937cb5"
+checksum = "c8a4c1d0bee3230179544336c15eefb563cf0302955d962e456542323e8c2e8a"
 dependencies = [
- "cfg-if",
- "doc-comment",
+ "filetime",
  "libc",
- "ntapi",
- "once_cell",
- "rayon",
- "winapi 0.3.8",
+ "redox_syscall",
+ "xattr",
 ]

 [[package]]

@@ -3213,6 +3189,15 @@ dependencies = [
  "winapi-build",
 ]

+[[package]]
+name = "xattr"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "244c3741f4240ef46274860397c7c74e50eb23624996930e484c16679633a54c"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "zerocopy"
 version = "0.3.0"
README.md (63 lines changed)
@@ -2,7 +2,6 @@
   <img src="assets/logo.svg" alt="MeiliSearch" width="200" height="200" />
 </p>
-

 <h1 align="center">MeiliSearch</h1>

 <h4 align="center">

@@ -20,6 +19,7 @@
   <a href="https://github.com/meilisearch/MeiliSearch/blob/master/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
   <a href="https://slack.meilisearch.com"><img src="https://img.shields.io/badge/slack-MeiliSearch-blue.svg?logo=slack" alt="Slack"></a>
   <a href="https://github.com/meilisearch/MeiliSearch/discussions" alt="Discussions"><img src="https://img.shields.io/badge/github-discussions-red" /></a>
+  <a href="https://app.bors.tech/repositories/26457"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
 </p>

 <p align="center">⚡ Lightning Fast, Ultra Relevant, and Typo-Tolerant Search Engine 🔍</p>

@@ -28,15 +28,14 @@
 For more information about features go to [our documentation](https://docs.meilisearch.com/).

 <p align="center">
-  <a href="https://crates.meilisearch.com"><img src="assets/crates-io-demo.gif" alt="crates.io demo gif" /></a>
+  <img src="assets/movies-web-demo.gif" alt="Web interface gif" />
 </p>

-> MeiliSearch helps the Rust community find crates on [crates.meilisearch.com](https://crates.meilisearch.com)
-
-## Features
+## ✨ Features
 * Search as-you-type experience (answers < 50 milliseconds)
 * Full-text search
 * Typo tolerant (understands typos and misspelling)
 * Faceted search and filters
 * Supports Kanji characters
 * Supports synonyms
 * Easy to install, deploy, and maintain

@@ -44,28 +43,28 @@ For more information about features go to [our documentation](https://docs.meili
 * Highly customizable
 * RESTful API

-## Get started
+## Getting started

 ### Deploy the Server

-#### Run it using Digital Ocean
-
-[](https://marketplace.digitalocean.com/apps/meilisearch?action=deploy&refcode=7c67bd97e101)
-
-#### Run it using Docker
-
-```bash
-docker run -p 7700:7700 -v $(pwd)/data.ms:/data.ms getmeili/meilisearch
-```
-
-#### Installing with Homebrew
+#### Brew (Mac OS)

 ```bash
 brew update && brew install meilisearch
 meilisearch
 ```

-#### Installing with APT
+#### Docker
+
+```bash
+docker run -p 7700:7700 -v $(pwd)/data.ms:/data.ms getmeili/meilisearch
+```
+
+#### Run on Digital Ocean
+
+[](https://marketplace.digitalocean.com/apps/meilisearch?action=deploy&refcode=7c67bd97e101)
+
+#### APT (Debian & Ubuntu)

 ```bash
 echo "deb [trusted=yes] https://apt.fury.io/meilisearch/ /" > /etc/apt/sources.list.d/fury.list

@@ -73,7 +72,7 @@ apt update && apt install meilisearch-http
 meilisearch
 ```

-#### Download the binary
+#### Download the binary (Linux & Mac OS)

 ```bash
 curl -L https://install.meilisearch.com | sh

@@ -82,7 +81,7 @@ curl -L https://install.meilisearch.com | sh

 #### Compile and run it from sources

-If you have the Rust toolchain already installed on your local system, clone the repository and change it to your working directory.
+If you have the latest stable Rust toolchain installed on your local system, clone the repository and change it to your working directory.

 ```bash
 git clone https://github.com/meilisearch/MeiliSearch.git

@@ -92,6 +91,8 @@ cd MeiliSearch
 In the cloned repository, compile MeiliSearch.

 ```bash
+rustup override set stable
+rustup update stable
 cargo run --release
 ```

@@ -161,33 +162,31 @@ We also deliver an **out-of-the-box web interface** in which you can test MeiliS

 You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter MeiliSearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.

-<p align="center">
-  <img src="assets/movies-web-demo.gif" alt="Web interface gif" />
-</p>
+[See the gif above](#demo)

-### Documentation
+## Documentation

 Now that your MeiliSearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).

 ## Contributing

 Hey! We're glad you're thinking about contributing to MeiliSearch! If you think something is missing or could be improved, please open issues and pull requests. If you'd like to help this project grow, we'd love to have you! To start contributing, checking [issues tagged as "good-first-issue"](https://github.com/meilisearch/MeiliSearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) is a good start!

-### Analytic Events
+## Telemetry

-Every hour, events are being sent to our Amplitude instance so we can know how many people are using MeiliSearch.<br/>
+MeiliSearch collects anonymous data regarding general usage.
+This helps us better understand developers' usage of MeiliSearch features.<br/>
 To see what information we're retrieving, please see the complete list [on the dedicated issue](https://github.com/meilisearch/MeiliSearch/issues/720).<br/>
-If this doesn't suit you, you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.
+We also use Sentry to send us crash and error reports. If you want to know more about what Sentry collects, please visit their [privacy policy website](https://sentry.io/privacy/).<br/>
+This program is optional; you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.

-## Contact
+## 💌 Contact

 Feel free to contact us about any questions you may have:
-* At [bonjour@meilisearch.com](mailto:bonjour@meilisearch.com): English or French is welcome! 🇬🇧 🇫🇷
+* At [bonjour@meilisearch.com](mailto:bonjour@meilisearch.com)
 * Via the chat box available on every page of [our documentation](https://docs.meilisearch.com/) and on [our landing page](https://www.meilisearch.com/).
-* 🆕 Join our [GitHub Discussions forum](https://github.com/meilisearch/MeiliSearch/discussions) (BETA hype!)
+* 🆕 Join our [GitHub Discussions forum](https://github.com/meilisearch/MeiliSearch/discussions)
 * Join our [Slack community](https://slack.meilisearch.com/).
 * By opening an issue.

-Any suggestion or feedback is highly appreciated. Thank you for your support!
+MeiliSearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
bors.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
status = ["Test on macos-latest", "Test on ubuntu-latest"]
# 4 hours timeout
timeout-sec = 14400
bump.sh (new executable file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/bash

NEW_VERSION=$1

if [ -z "$NEW_VERSION" ]
then
    echo "error: a version number must be provided"
    exit 1
fi

# find current version
CURRENT_VERSION=$(cat **/*.toml | grep meilisearch | grep version | sed 's/.*\([0-9]\+\.[0-9]\+\.[0-9]\+\).*/\1/' | sed "1q;d")

# bump all version in .toml
echo "bumping from version $CURRENT_VERSION to version $NEW_VERSION"
while true
do
    read -r -p "Continue (y/n)?" choice
    case "$choice" in
        y|Y ) break;;
        n|N ) echo "aborting bump" && exit 0;;
        * ) echo "invalid choice";;
    esac
done
# update all crate version
sed -i "s/version = \"$CURRENT_VERSION\"/version = \"$NEW_VERSION\"/" **/*.toml

printf "running cargo check: "

CARGO_CHECK=$(cargo check 2>&1)

if [ $? != "0" ]
then
    printf "\033[31;1m FAIL \033[0m\n"
    printf "$CARGO_CHECK"
    exit 1
fi
printf "\033[32;1m OK \033[0m\n"
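A usage sketch (the version number here is hypothetical): running `./bump.sh 0.15.0` from the repository root finds the current version in the workspace's `*.toml` files, asks for confirmation, rewrites every matching `version = "..."` field to the new version, and finally runs `cargo check` to verify that the workspace still compiles.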
File diff suppressed because it is too large.
@@ -1,6 +1,6 @@
 [package]
 name = "meilisearch-core"
-version = "0.11.1"
+version = "0.14.1"
 license = "MIT"
 authors = ["Kerollmops <clement@meilisearch.com>"]
 edition = "2018"

@@ -24,10 +24,10 @@ intervaltree = "0.2.5"
 itertools = "0.9.0"
 levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] }
 log = "0.4.8"
-meilisearch-error = { path = "../meilisearch-error", version = "0.11.1" }
-meilisearch-schema = { path = "../meilisearch-schema", version = "0.11.1" }
-meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.11.1" }
-meilisearch-types = { path = "../meilisearch-types", version = "0.11.1" }
+meilisearch-error = { path = "../meilisearch-error", version = "0.14.1" }
+meilisearch-schema = { path = "../meilisearch-schema", version = "0.14.1" }
+meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.14.1" }
+meilisearch-types = { path = "../meilisearch-types", version = "0.14.1" }
 once_cell = "1.3.1"
 ordered-float = { version = "1.0.2", features = ["serde"] }
 pest = { git = "https://github.com/MarinPostma/pest.git", tag = "meilisearch-patch1" }
@@ -39,7 +39,7 @@ fn prepare_database(path: &Path) -> Database {
         let file = File::open(path).unwrap();
         let reader = BufReader::new(file);
         let settings: Settings = serde_json::from_reader(reader).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     db.update_write::<_, _, Box<dyn Error>>(|writer| {

@@ -123,7 +123,7 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box<dyn Error>> {
     let settings = {
         let string = fs::read_to_string(&command.settings)?;
         let settings: Settings = serde_json::from_str(&string).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     db.update_write(|w| index.settings_update(w, settings))?;

@@ -368,7 +368,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<dyn Error>> {
         });
     }

-    let result = builder.query(ref_reader, &query, 0..command.number_results)?;
+    let result = builder.query(ref_reader, Some(&query), 0..command.number_results)?;

     let mut retrieve_duration = Duration::default();
@@ -9,17 +9,17 @@ use std::time::Instant;
 use std::fmt;

 use compact_arena::{SmallArena, Idx32, mk_arena};
-use log::debug;
-use meilisearch_types::DocIndex;
+use log::{debug, error};
 use sdset::{Set, SetBuf, exponential_search, SetOperation, Counter, duo::OpBuilder};
 use slice_group_by::{GroupBy, GroupByMut};

-use crate::error::Error;
+use meilisearch_types::DocIndex;

 use crate::criterion::{Criteria, Context, ContextMut};
 use crate::distinct_map::{BufferedDistinctMap, DistinctMap};
 use crate::raw_document::RawDocument;
 use crate::{database::MainT, reordered_attrs::ReorderedAttrs};
-use crate::{store, Document, DocumentId, MResult};
+use crate::{store, Document, DocumentId, MResult, Index, RankedMap, MainReader, Error};
 use crate::query_tree::{create_query_tree, traverse_query_tree};
 use crate::query_tree::{Operation, QueryResult, QueryKind, QueryId, PostingsKey};
 use crate::query_tree::Context as QTContext;

@@ -33,21 +33,17 @@ pub struct SortResult {
     pub exhaustive_facets_count: Option<bool>,
 }

+#[allow(clippy::too_many_arguments)]
 pub fn bucket_sort<'c, FI>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
     range: Range<usize>,
     facets_docids: Option<SetBuf<DocumentId>>,
-    facet_count_docids: Option<HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>>,
+    facet_count_docids: Option<HashMap<String, HashMap<String, (&str, Cow<Set<DocumentId>>)>>>,
     filter: Option<FI>,
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    synonyms_store: store::Synonyms,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &Index,
 ) -> MResult<SortResult>
 where
     FI: Fn(DocumentId) -> bool,

@@ -68,26 +64,21 @@ where
             distinct_size,
             criteria,
             searchable_attrs,
-            main_store,
-            postings_lists_store,
-            documents_fields_counts_store,
-            synonyms_store,
-            prefix_documents_cache_store,
-            prefix_postings_lists_cache_store,
+            index,
         );
     }

     let mut result = SortResult::default();

-    let words_set = main_store.words_fst(reader)?;
-    let stop_words = main_store.stop_words_fst(reader)?;
+    let words_set = index.main.words_fst(reader)?;
+    let stop_words = index.main.stop_words_fst(reader)?;

     let context = QTContext {
         words_set,
         stop_words,
-        synonyms: synonyms_store,
-        postings_lists: postings_lists_store,
-        prefix_postings_lists: prefix_postings_lists_cache_store,
+        synonyms: index.synonyms,
+        postings_lists: index.postings_lists,
+        prefix_postings_lists: index.prefix_postings_lists_cache,
     };

     let (operation, mapping) = create_query_tree(reader, &context, query)?;

@@ -156,7 +147,7 @@ where
             reader,
             postings_lists: &mut arena,
             query_mapping: &mapping,
-            documents_fields_counts_store,
+            documents_fields_counts_store: index.documents_fields_counts,
         };

         criterion.prepare(ctx, &mut group)?;

@@ -189,7 +180,7 @@ where
     debug!("criterion loop took {:.02?}", before_criterion_loop.elapsed());
     debug!("proximity evaluation called {} times", proximity_count.load(Ordering::Relaxed));

-    let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;
+    let schema = index.main.schema(reader)?.ok_or(Error::SchemaMissing)?;
     let iter = raw_documents.into_iter().skip(range.start).take(range.len());
     let iter = iter.map(|rd| Document::from_raw(rd, &queries_kinds, &arena, searchable_attrs.as_ref(), &schema));
     let documents = iter.collect();

@@ -202,23 +193,19 @@ where
     Ok(result)
 }

+#[allow(clippy::too_many_arguments)]
 pub fn bucket_sort_with_distinct<'c, FI, FD>(
     reader: &heed::RoTxn<MainT>,
     query: &str,
     range: Range<usize>,
     facets_docids: Option<SetBuf<DocumentId>>,
-    facet_count_docids: Option<HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>>,
+    facet_count_docids: Option<HashMap<String, HashMap<String, (&str, Cow<Set<DocumentId>>)>>>,
     filter: Option<FI>,
     distinct: FD,
     distinct_size: usize,
     criteria: Criteria<'c>,
     searchable_attrs: Option<ReorderedAttrs>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    synonyms_store: store::Synonyms,
-    _prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &Index,
 ) -> MResult<SortResult>
 where
     FI: Fn(DocumentId) -> bool,

@@ -226,15 +213,15 @@ where
 {
     let mut result = SortResult::default();

-    let words_set = main_store.words_fst(reader)?;
-    let stop_words = main_store.stop_words_fst(reader)?;
+    let words_set = index.main.words_fst(reader)?;
+    let stop_words = index.main.stop_words_fst(reader)?;

     let context = QTContext {
         words_set,
         stop_words,
-        synonyms: synonyms_store,
-        postings_lists: postings_lists_store,
-        prefix_postings_lists: prefix_postings_lists_cache_store,
+        synonyms: index.synonyms,
+        postings_lists: index.postings_lists,
+        prefix_postings_lists: index.prefix_postings_lists_cache,
     };

     let (operation, mapping) = create_query_tree(reader, &context, query)?;

@@ -313,7 +300,7 @@ where
             reader,
             postings_lists: &mut arena,
             query_mapping: &mapping,
-            documents_fields_counts_store,
+            documents_fields_counts_store: index.documents_fields_counts,
         };

         let before_criterion_preparation = Instant::now();

@@ -378,17 +365,23 @@ where
         // once we classified the documents related to the current
         // automatons we save that as the next valid result
         let mut seen = BufferedDistinctMap::new(&mut distinct_map);
-        let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;
+        let schema = index.main.schema(reader)?.ok_or(Error::SchemaMissing)?;

         let mut documents = Vec::with_capacity(range.len());
         for raw_document in raw_documents.into_iter().skip(distinct_raw_offset) {
             let filter_accepted = match &filter {
-                Some(_) => filter_map.remove(&raw_document.id).unwrap(),
+                Some(_) => filter_map.remove(&raw_document.id).unwrap_or_else(|| {
+                    error!("error during filtering: expected value for document id {}", &raw_document.id.0);
+                    Default::default()
+                }),
                 None => true,
             };

             if filter_accepted {
-                let key = key_cache.remove(&raw_document.id).unwrap();
+                let key = key_cache.remove(&raw_document.id).unwrap_or_else(|| {
+                    error!("error during distinct: expected value for document id {}", &raw_document.id.0);
+                    Default::default()
+                });
                 let distinct_accepted = match key {
                     Some(key) => seen.register(key),
                     None => seen.register_without_key(),

@@ -601,19 +594,66 @@ impl Deref for PostingsListView<'_> {
     }
 }

+/// sorts documents ids according to user defined ranking rules.
+pub fn placeholder_document_sort(
+    document_ids: &mut [DocumentId],
+    index: &store::Index,
+    reader: &MainReader,
+    ranked_map: &RankedMap
+) -> MResult<()> {
+    use crate::settings::RankingRule;
+    use std::cmp::Ordering;
+
+    enum SortOrder {
+        Asc,
+        Desc,
+    }
+
+    if let Some(ranking_rules) = index.main.ranking_rules(reader)? {
+        let schema = index.main.schema(reader)?
+            .ok_or(Error::SchemaMissing)?;
+
+        // Select custom rules from ranking rules, and map them to custom rules
+        // containing a field_id
+        let ranking_rules = ranking_rules.iter().filter_map(|r|
+            match r {
+                RankingRule::Asc(name) => schema.id(name).map(|f| (f, SortOrder::Asc)),
+                RankingRule::Desc(name) => schema.id(name).map(|f| (f, SortOrder::Desc)),
+                _ => None,
+            }).collect::<Vec<_>>();
+
+        document_ids.sort_unstable_by(|a, b| {
+            for (field_id, order) in &ranking_rules {
+                let a_value = ranked_map.get(*a, *field_id);
+                let b_value = ranked_map.get(*b, *field_id);
+                let (a, b) = match order {
+                    SortOrder::Asc => (a_value, b_value),
+                    SortOrder::Desc => (b_value, a_value),
+                };
+                match a.cmp(&b) {
+                    Ordering::Equal => continue,
+                    ordering => return ordering,
+                }
+            }
+            Ordering::Equal
+        });
+    }
+    Ok(())
+}
+
 /// For each entry in facet_docids, calculates the number of documents in the intersection with candidate_docids.
-fn facet_count(
-    facet_docids: HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>,
+pub fn facet_count(
+    facet_docids: HashMap<String, HashMap<String, (&str, Cow<Set<DocumentId>>)>>,
     candidate_docids: &Set<DocumentId>,
 ) -> HashMap<String, HashMap<String, usize>> {
     let mut facets_counts = HashMap::with_capacity(facet_docids.len());
     for (key, doc_map) in facet_docids {
         let mut count_map = HashMap::with_capacity(doc_map.len());
-        for (value, docids) in doc_map {
+        for (_, (value, docids)) in doc_map {
             let mut counter = Counter::new();
             let op = OpBuilder::new(docids.as_ref(), candidate_docids).intersection();
             SetOperation::<DocumentId>::extend_collection(op, &mut counter);
-            count_map.insert(value, counter.0);
+            count_map.insert(value.to_string(), counter.0);
         }
         facets_counts.insert(key, count_map);
     }
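The new `placeholder_document_sort` above walks the custom `Asc`/`Desc` ranking rules in order and only falls through to the next rule on a tie. A minimal, self-contained sketch of that comparator pattern, with toy types and values standing in for the MeiliSearch `RankedMap`/`Index` API (everything here is illustrative):

```rust
use std::cmp::Ordering;

#[derive(Clone, Copy)]
enum SortOrder {
    Asc,
    Desc,
}

/// Sort document ids by a list of (field, order) rules; later rules break ties.
fn sort_by_rules(doc_ids: &mut [u32], rules: &[(usize, SortOrder)], value: impl Fn(u32, usize) -> i64) {
    doc_ids.sort_unstable_by(|a, b| {
        for &(field, order) in rules {
            // Swapping the operands inverts the comparison for descending rules.
            let (x, y) = match order {
                SortOrder::Asc => (value(*a, field), value(*b, field)),
                SortOrder::Desc => (value(*b, field), value(*a, field)),
            };
            match x.cmp(&y) {
                Ordering::Equal => continue, // tie: try the next ranking rule
                ordering => return ordering,
            }
        }
        Ordering::Equal // equal under every rule
    });
}

fn main() {
    // values[doc][field]: two ranked fields per document.
    let values: [[i64; 2]; 3] = [[3, 10], [1, 5], [1, 9]];
    let mut ids = vec![0u32, 1, 2];
    // Ascending on field 0, then descending on field 1 as a tie-breaker.
    sort_by_rules(&mut ids, &[(0, SortOrder::Asc), (1, SortOrder::Desc)], |d, f| values[d as usize][f]);
    assert_eq!(ids, vec![2, 1, 0]);
}
```

Operand swapping keeps a single `cmp` call per rule instead of duplicating the loop for each direction, which is the same trick the diff uses.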
@@ -92,6 +92,7 @@ impl<'a> CriteriaBuilder<'a> {
         self.inner.reserve(additional)
     }

+    #[allow(clippy::should_implement_trait)]
     pub fn add<C: 'a>(mut self, criterion: C) -> CriteriaBuilder<'a>
     where
         C: Criterion,
@@ -22,6 +22,7 @@ impl Criterion for Typo {
     // It is safe to panic on input number higher than 3,
     // the number of typos is never bigger than that.
     #[inline]
+    #[allow(clippy::approx_constant)]
     fn custom_log10(n: u8) -> f32 {
         match n {
             0 => 0.0, // log(1)
@@ -3,13 +3,15 @@ use std::fs::File;
 use std::path::Path;
 use std::sync::{Arc, RwLock};
 use std::{fs, thread};
+use std::io::{Read, Write, ErrorKind};

 use chrono::{DateTime, Utc};
 use crossbeam_channel::{Receiver, Sender};
-use heed::types::{Str, Unit, SerdeBincode};
 use heed::CompactionOption;
+use heed::types::{Str, Unit, SerdeBincode};
 use log::{debug, error};
 use meilisearch_schema::Schema;
+use regex::Regex;

 use crate::{store, update, Index, MResult, Error};

@@ -38,6 +40,7 @@ pub struct Database {
     indexes_store: heed::Database<Str, Unit>,
     indexes: RwLock<HashMap<String, (Index, thread::JoinHandle<MResult<()>>)>>,
     update_fn: Arc<ArcSwapFn>,
+    database_version: (u32, u32, u32),
 }

 pub struct DatabaseOptions {

@@ -82,8 +85,7 @@ fn update_awaiter(
     update_fn: Arc<ArcSwapFn>,
     index: Index,
 ) -> MResult<()> {
-    let mut receiver = receiver.into_iter();
-    while let Some(event) = receiver.next() {
+    for event in receiver {

         // if we receive a *MustClear* event, clear the index and break the loop
         if let UpdateEvent::MustClear = event {

@@ -162,11 +164,81 @@
     Ok(())
 }

+/// Ensures the Meilisearch version is compatible with the database; returns an error on version mismatch.
+/// If create is set to true, a VERSION file is created with the current version.
+fn version_guard(path: &Path, create: bool) -> MResult<(u32, u32, u32)> {
+    let current_version_major = env!("CARGO_PKG_VERSION_MAJOR");
+    let current_version_minor = env!("CARGO_PKG_VERSION_MINOR");
+    let current_version_patch = env!("CARGO_PKG_VERSION_PATCH");
+    let version_path = path.join("VERSION");
+
+    match File::open(&version_path) {
+        Ok(mut file) => {
+            let mut version = String::new();
+            file.read_to_string(&mut version)?;
+            // Matches strings like XX.XX.XX
+            let re = Regex::new(r"(\d+).(\d+).(\d+)").unwrap();
+
+            // Make sure there is a result
+            let version = re
+                .captures_iter(&version)
+                .next()
+                .ok_or_else(|| Error::VersionMismatch("bad VERSION file".to_string()))?;
+            // the first is always the complete match, safe to unwrap because we have a match
+            let version_major = version.get(1).unwrap().as_str();
+            let version_minor = version.get(2).unwrap().as_str();
+            let version_patch = version.get(3).unwrap().as_str();
+
+            if version_major != current_version_major || version_minor != current_version_minor {
+                Err(Error::VersionMismatch(format!("{}.{}.XX", version_major, version_minor)))
+            } else {
+                Ok((
+                    version_major.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
+                    version_minor.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
+                    version_patch.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?
+                ))
+            }
+        }
+        Err(error) => {
+            match error.kind() {
+                ErrorKind::NotFound => {
+                    if create {
+                        // when no version file is found, and we've been told to create one,
+                        // create a new file with the current version in it.
+                        let mut version_file = File::create(&version_path)?;
+                        version_file.write_all(format!("{}.{}.{}",
+                            current_version_major,
+                            current_version_minor,
+                            current_version_patch).as_bytes())?;
+
+                        Ok((
+                            current_version_major.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
+                            current_version_minor.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?,
+                            current_version_patch.parse().or_else(|e| Err(Error::VersionMismatch(format!("error parsing database version: {}", e))))?
+                        ))
+                    } else {
+                        // when no version file is found and we were not told to create one, this
+                        // means that the version is inferior to the one this feature was added in.
+                        Err(Error::VersionMismatch("<0.12.0".to_string()))
+                    }
+                }
+                _ => Err(error.into())
+            }
+        }
+    }
+}
+
 impl Database {
     pub fn open_or_create(path: impl AsRef<Path>, options: DatabaseOptions) -> MResult<Database> {
         let main_path = path.as_ref().join("main");
         let update_path = path.as_ref().join("update");

+        //create db directory
+        fs::create_dir_all(&path)?;
+
+        // create file only if main db wasn't created before (first run)
+        let database_version = version_guard(path.as_ref(), !main_path.exists() && !update_path.exists())?;
+
         fs::create_dir_all(&main_path)?;
         let env = heed::EnvOpenOptions::new()
             .map_size(options.main_map_size)
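For reference, the `VERSION` file that `version_guard` manages holds a single `major.minor.patch` string (for example `0.14.1`), and only the major and minor parts gate compatibility. A small runnable sketch of the same capture logic, using the `regex` crate the diff imports (the concrete version string is illustrative); note that the pattern's dots are unescaped, so they match any separator character:

```rust
// Cargo.toml needs: regex = "1" (standalone snippet mirroring version_guard's pattern)
use regex::Regex;

fn main() {
    let re = Regex::new(r"(\d+).(\d+).(\d+)").unwrap();
    // A VERSION file written by engine 0.14.1 contains exactly "0.14.1".
    let caps = re.captures_iter("0.14.1").next().unwrap();
    assert_eq!(&caps[1], "0");  // major: must match the running engine
    assert_eq!(&caps[2], "14"); // minor: must match the running engine
    assert_eq!(&caps[3], "1");  // patch: free to differ
}
```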
@@ -243,6 +315,7 @@ impl Database {
             indexes_store,
             indexes: RwLock::new(indexes),
             update_fn,
+            database_version,
         })
     }

@@ -410,10 +483,19 @@ impl Database {

         let env_path = path.join("main");
         let env_update_path = path.join("update");
+        let env_version_path = path.join("VERSION");

         fs::create_dir(&env_path)?;
         fs::create_dir(&env_update_path)?;

+        // write Database Version
+        let (current_version_major, current_version_minor, current_version_patch) = self.database_version;
+        let mut version_file = File::create(&env_version_path)?;
+        version_file.write_all(format!("{}.{}.{}",
+            current_version_major,
+            current_version_minor,
+            current_version_patch).as_bytes())?;
+
         let env_path = env_path.join("data.mdb");
         let env_file = self.env.copy_to_path(&env_path, CompactionOption::Enabled)?;

@@ -547,7 +629,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut update_writer = db.update_write_txn().unwrap();

@@ -610,7 +692,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut update_writer = db.update_write_txn().unwrap();

@@ -672,7 +754,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut update_writer = db.update_write_txn().unwrap();

@@ -727,7 +809,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut update_writer = db.update_write_txn().unwrap();

@@ -763,7 +845,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut writer = db.update_write_txn().unwrap();

@@ -815,7 +897,7 @@ mod tests {

     // even try to search for a document
     let reader = db.main_read_txn().unwrap();
-    let SortResult { documents, .. } = index.query_builder().query(&reader, "21 ", 0..20).unwrap();
+    let SortResult { documents, .. } = index.query_builder().query(&reader, Some("21 "), 0..20).unwrap();
     assert_matches!(documents.len(), 1);

     reader.abort().unwrap();

@@ -829,7 +911,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut writer = db.update_write_txn().unwrap();

@@ -871,7 +953,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut writer = db.update_write_txn().unwrap();

@@ -951,7 +1033,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut writer = db.update_write_txn().unwrap();

@@ -1090,7 +1172,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut writer = db.update_write_txn().unwrap();

@@ -1166,7 +1248,7 @@ mod tests {
         }
         "#;
         let settings: Settings = serde_json::from_str(data).unwrap();
-        settings.into_update().unwrap()
+        settings.to_update().unwrap()
     };

     let mut writer = db.update_write_txn().unwrap();

@@ -1213,7 +1295,7 @@ mod tests {

     let builder = index.query_builder_with_criteria(criteria);

-    let SortResult { documents, .. } = builder.query(&reader, "Kevin", 0..20).unwrap();
+    let SortResult { documents, .. } = builder.query(&reader, Some("Kevin"), 0..20).unwrap();
     let mut iter = documents.into_iter();

     assert_matches!(
@@ -15,22 +15,23 @@ pub type MResult<T> = Result<T, Error>;

 #[derive(Debug)]
 pub enum Error {
-    Io(io::Error),
-    IndexAlreadyExists,
-    MissingPrimaryKey,
-    SchemaMissing,
-    WordIndexMissing,
-    MissingDocumentId,
-    MaxFieldsLimitExceeded,
-    Schema(meilisearch_schema::Error),
-    Heed(heed::Error),
-    Fst(fst::Error),
-    SerdeJson(SerdeJsonError),
     Bincode(bincode::Error),
-    Serializer(SerializerError),
     Deserializer(DeserializerError),
-    FilterParseError(PestError<Rule>),
     FacetError(FacetError),
+    FilterParseError(PestError<Rule>),
+    Fst(fst::Error),
+    Heed(heed::Error),
+    IndexAlreadyExists,
+    Io(io::Error),
+    MaxFieldsLimitExceeded,
+    MissingDocumentId,
+    MissingPrimaryKey,
+    Schema(meilisearch_schema::Error),
+    SchemaMissing,
+    SerdeJson(SerdeJsonError),
+    Serializer(SerializerError),
+    VersionMismatch(String),
+    WordIndexMissing,
 }

 impl ErrorCode for Error {

@@ -41,7 +42,7 @@ impl ErrorCode for Error {
             FacetError(_) => Code::Facet,
             FilterParseError(_) => Code::Filter,
             IndexAlreadyExists => Code::IndexAlreadyExists,
-            MissingPrimaryKey => Code::InvalidState,
+            MissingPrimaryKey => Code::MissingPrimaryKey,
             MissingDocumentId => Code::MissingDocumentId,
             MaxFieldsLimitExceeded => Code::MaxFieldsLimitExceeded,
             Schema(s) => s.error_code(),

@@ -53,6 +54,7 @@ impl ErrorCode for Error {
             | Bincode(_)
             | Serializer(_)
             | Deserializer(_)
+            | VersionMismatch(_)
             | Io(_) => Code::Internal,
         }
     }

@@ -124,7 +126,10 @@ impl From<BincodeError> for Error {

 impl From<SerializerError> for Error {
     fn from(error: SerializerError) -> Error {
-        Error::Serializer(error)
+        match error {
+            SerializerError::DocumentIdNotFound => Error::MissingDocumentId,
+            e => Error::Serializer(e),
+        }
     }
 }

@@ -138,22 +143,27 @@ impl fmt::Display for Error {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         use self::Error::*;
         match self {
-            Io(e) => write!(f, "{}", e),
-            IndexAlreadyExists => write!(f, "index already exists"),
-            MissingPrimaryKey => write!(f, "schema cannot be built without a primary key"),
-            SchemaMissing => write!(f, "this index does not have a schema"),
-            WordIndexMissing => write!(f, "this index does not have a word index"),
-            MissingDocumentId => write!(f, "document id is missing"),
-            MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
-            Schema(e) => write!(f, "schema error; {}", e),
-            Heed(e) => write!(f, "heed error; {}", e),
-            Fst(e) => write!(f, "fst error; {}", e),
-            SerdeJson(e) => write!(f, "serde json error; {}", e),
             Bincode(e) => write!(f, "bincode error; {}", e),
-            Serializer(e) => write!(f, "serializer error; {}", e),
             Deserializer(e) => write!(f, "deserializer error; {}", e),
-            FilterParseError(e) => write!(f, "error parsing filter; {}", e),
             FacetError(e) => write!(f, "error processing facet filter: {}", e),
+            FilterParseError(e) => write!(f, "error parsing filter; {}", e),
+            Fst(e) => write!(f, "fst error; {}", e),
+            Heed(e) => write!(f, "heed error; {}", e),
+            IndexAlreadyExists => write!(f, "index already exists"),
+            Io(e) => write!(f, "{}", e),
+            MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
+            MissingDocumentId => write!(f, "document id is missing"),
+            MissingPrimaryKey => write!(f, "schema cannot be built without a primary key"),
+            Schema(e) => write!(f, "schema error; {}", e),
+            SchemaMissing => write!(f, "this index does not have a schema"),
+            SerdeJson(e) => write!(f, "serde json error; {}", e),
+            Serializer(e) => write!(f, "serializer error; {}", e),
+            VersionMismatch(version) => write!(f, "Cannot open database, expected MeiliSearch engine version: {}, current engine version: {}.{}.{}",
+                version,
+                env!("CARGO_PKG_VERSION_MAJOR"),
+                env!("CARGO_PKG_VERSION_MINOR"),
+                env!("CARGO_PKG_VERSION_PATCH")),
+            WordIndexMissing => write!(f, "this index does not have a word index"),
         }
     }
 }
@@ -70,7 +70,7 @@ impl FacetFilter {
                 bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value).into()),
                 }
             }
-            return Ok(Self(filter));
+            Ok(Self(filter))
         }
         bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value).into()),
     }

@@ -164,7 +164,7 @@ impl<'a> heed::BytesDecode<'a> for FacetKey {
 }

 pub fn add_to_facet_map(
-    facet_map: &mut HashMap<FacetKey, Vec<DocumentId>>,
+    facet_map: &mut HashMap<FacetKey, (String, Vec<DocumentId>)>,
     field_id: FieldId,
     value: Value,
     document_id: DocumentId,

@@ -175,8 +175,8 @@ pub fn add_to_facet_map(
         Value::Null => return Ok(()),
         value => return Err(FacetError::InvalidDocumentAttribute(value.to_string())),
     };
-    let key = FacetKey::new(field_id, value);
-    facet_map.entry(key).or_insert_with(Vec::new).push(document_id);
+    let key = FacetKey::new(field_id, value.clone());
+    facet_map.entry(key).or_insert_with(|| (value, Vec::new())).1.push(document_id);
     Ok(())
 }

@@ -185,8 +185,10 @@ pub fn facet_map_from_docids(
     index: &crate::Index,
     document_ids: &[DocumentId],
     attributes_for_facetting: &[FieldId],
-) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
-    let mut facet_map = HashMap::new();
+) -> MResult<HashMap<FacetKey, (String, Vec<DocumentId>)>> {
+    // A hashmap that associates a facet key to a pair containing the original facet attribute
+    // string with its case preserved, and a list of document ids for that facet attribute.
+    let mut facet_map: HashMap<FacetKey, (String, Vec<DocumentId>)> = HashMap::new();
     for document_id in document_ids {
         for result in index
             .documents_fields

@@ -212,7 +214,7 @@ pub fn facet_map_from_docs(
     schema: &Schema,
     documents: &HashMap<DocumentId, IndexMap<String, Value>>,
     attributes_for_facetting: &[FieldId],
-) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
+) -> MResult<HashMap<FacetKey, (String, Vec<DocumentId>)>> {
     let mut facet_map = HashMap::new();
     let attributes_for_facetting = attributes_for_facetting
         .iter()
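Per the comment in the diff, the new `(String, Vec<DocumentId>)` pair lets facet counts report each attribute value with its original casing even though `FacetKey` normalizes the value it is keyed on. A toy illustration of that idea with plain strings standing in for `FacetKey` (this is not the MeiliSearch API; the lowercased key stands in for whatever normalization `FacetKey` applies):

```rust
use std::collections::HashMap;

fn main() {
    // Key by the normalized (lowercased) value, but remember the first original spelling.
    let mut facet_map: HashMap<String, (String, Vec<u32>)> = HashMap::new();
    for (doc_id, value) in [(1u32, "Blue"), (2, "blue"), (3, "BLUE")] {
        let key = value.to_lowercase();
        facet_map
            .entry(key)
            .or_insert_with(|| (value.to_string(), Vec::new()))
            .1
            .push(doc_id);
    }
    // One bucket, displayed with the preserved casing "Blue", holding 3 documents.
    let (display, ids) = &facet_map["blue"];
    assert_eq!(display, "Blue");
    assert_eq!(ids.len(), 3);
}
```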
@@ -31,7 +31,7 @@ struct ConditionValue<'a> {

 impl<'a> ConditionValue<'a> {
     pub fn new(value: &Pair<'a, Rule>) -> Self {
-        let value = match value.as_rule() {
+        match value.as_rule() {
             Rule::string | Rule::word => {
                 let string = value.as_str();
                 let boolean = match value.as_str() {

@@ -43,12 +43,11 @@ impl<'a> ConditionValue<'a> {
                 ConditionValue { string, boolean, number }
             },
             _ => unreachable!(),
-        };
-        value
+        }
     }

     pub fn as_str(&self) -> &str {
-        self.string.as_ref()
+        self.string
     }

     pub fn as_number(&self) -> Option<&Number> {

@@ -73,7 +72,7 @@ fn get_field_value<'a>(schema: &Schema, pair: Pair<'a, Rule>) -> Result<(FieldId
     let key = items.next().unwrap();
     let field = schema
         .id(key.as_str())
-        .ok_or::<PestError<Rule>>(PestError::new_from_span(
+        .ok_or_else(|| PestError::new_from_span(
             ErrorVariant::CustomError {
                 message: format!(
                     "attribute `{}` not found, available attributes are: {}",
|
@ -26,7 +26,7 @@ pub enum Filter<'a> {
|
||||
|
||||
impl<'a> Filter<'a> {
|
||||
pub fn parse(expr: &'a str, schema: &'a Schema) -> FilterResult<'a> {
|
||||
let mut lexed = FilterParser::parse(Rule::prgm, expr.as_ref())?;
|
||||
let mut lexed = FilterParser::parse(Rule::prgm, expr)?;
|
||||
Self::build(lexed.next().unwrap().into_inner(), schema)
|
||||
}
|
||||
|
||||
|
@@ -1,3 +1,5 @@
+#![allow(clippy::type_complexity)]
+
 #[cfg(test)]
 #[macro_use]
 extern crate assert_matches;
@@ -6,7 +6,7 @@ use std::str::FromStr;
 use ordered_float::OrderedFloat;
 use serde::{Deserialize, Serialize};

-#[derive(Serialize, Deserialize, Debug, Copy, Clone, Hash)]
+#[derive(Serialize, Deserialize, Debug, Copy, Clone)]
 pub enum Number {
     Unsigned(u64),
     Signed(i64),
@@ -1,18 +1,20 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::ops::{Range, Deref};
use std::ops::{Deref, Range};
use std::time::Duration;

use either::Either;
use sdset::SetOperation;
use sdset::{SetOperation, SetBuf, Set};

use meilisearch_schema::FieldId;

use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct, SortResult, placeholder_document_sort, facet_count};
use crate::database::MainT;
use crate::bucket_sort::{bucket_sort, bucket_sort_with_distinct, SortResult};
use crate::{criterion::Criteria, DocumentId};
use crate::{reordered_attrs::ReorderedAttrs, store, MResult};
use crate::facets::FacetFilter;
use crate::distinct_map::{DistinctMap, BufferedDistinctMap};
use crate::Document;
use crate::{criterion::Criteria, DocumentId};
use crate::{reordered_attrs::ReorderedAttrs, store, MResult, MainReader};

pub struct QueryBuilder<'c, 'f, 'd, 'i> {
criteria: Criteria<'c>,
@@ -27,10 +29,7 @@ pub struct QueryBuilder<'c, 'f, 'd, 'i> {

impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
pub fn new(index: &'i store::Index) -> Self {
QueryBuilder::with_criteria(
index,
Criteria::default(),
)
QueryBuilder::with_criteria(index, Criteria::default())
}

/// sets facet attributes to filter on
@@ -43,10 +42,7 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
self.facets = facets;
}

pub fn with_criteria(
index: &'i store::Index,
criteria: Criteria<'c>,
) -> Self {
pub fn with_criteria(index: &'i store::Index, criteria: Criteria<'c>) -> Self {
QueryBuilder {
criteria,
searchable_attrs: None,
@@ -82,14 +78,11 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
reorders.insert_attribute(attribute);
}
pub fn query(
self,
reader: &heed::RoTxn<MainT>,
query: &str,
range: Range<usize>,
) -> MResult<SortResult> {
let facets_docids = match self.facet_filter {
Some(facets) => {
/// returns the documents ids associated with a facet filter by computing the union and
/// intersection of the document sets
fn facets_docids(&self, reader: &MainReader) -> MResult<Option<SetBuf<DocumentId>>> {
let facet_docids = match self.facet_filter {
Some(ref facets) => {
let mut ands = Vec::with_capacity(facets.len());
let mut ors = Vec::new();
for f in facets.deref() {
@@ -97,48 +90,48 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
Either::Left(keys) => {
ors.reserve(keys.len());
for key in keys {
let docids = self.index.facets.facet_document_ids(reader, &key)?.unwrap_or_default();
let docids = self
.index
.facets
.facet_document_ids(reader, &key)?
.unwrap_or_default();
ors.push(docids);
}
let sets: Vec<_> = ors.iter().map(Cow::deref).collect();
let sets: Vec<_> = ors.iter().map(|(_, i)| i).map(Cow::deref).collect();
let or_result = sdset::multi::OpBuilder::from_vec(sets).union().into_set_buf();
ands.push(Cow::Owned(or_result));
ors.clear();
}
Either::Right(key) => {
match self.index.facets.facet_document_ids(reader, &key)? {
Some(docids) => ands.push(docids),
Some((_name, docids)) => ands.push(docids),
// no candidates for search, early return.
None => return Ok(SortResult::default()),
None => return Ok(Some(SetBuf::default())),
}
}
};
}
let ands: Vec<_> = ands.iter().map(Cow::deref).collect();
Some(sdset::multi::OpBuilder::from_vec(ands).intersection().into_set_buf())
}
None => None
};

// for each field to retrieve the count for, create a HashMap associating the attribute
// value to a set of matching documents. The HashMaps are then collected in another
// HashMap, associating each HashMap to its field.
let facet_count_docids = match self.facets {
Some(field_ids) => {
let mut facet_count_map = HashMap::new();
for (field_id, field_name) in field_ids {
let mut key_map = HashMap::new();
for pair in self.index.facets.field_document_ids(reader, field_id)? {
let (facet_key, document_ids) = pair?;
let value = facet_key.value();
key_map.insert(value.to_string(), document_ids);
}
facet_count_map.insert(field_name, key_map);
}
Some(facet_count_map)
Some(
sdset::multi::OpBuilder::from_vec(ands)
.intersection()
.into_set_buf(),
)
}
None => None,
};
Ok(facet_docids)
}
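The union-then-intersection above is the heart of facet filtering: the docid sets of values belonging to one field are OR'ed together, and the per-field results are AND'ed. A self-contained sketch with the `sdset` crate, using plain `u32` ids as a stand-in for `DocumentId`:

```rust
use sdset::{Set, SetBuf, SetOperation};

fn main() {
    // Sorted docid sets, one per facet value.
    let horror: &Set<u32> = Set::new_unchecked(&[1, 3, 5]);
    let scifi: &Set<u32> = Set::new_unchecked(&[2, 3, 8]);
    let in_stock: &Set<u32> = Set::new_unchecked(&[3, 5, 8]);

    // Values of the same field are OR'ed...
    let genre: SetBuf<u32> = sdset::multi::OpBuilder::from_vec(vec![horror, scifi])
        .union()
        .into_set_buf();

    // ...then the per-field results are AND'ed.
    let candidates: SetBuf<u32> = sdset::duo::OpBuilder::new(genre.as_set(), in_stock)
        .intersection()
        .into_set_buf();

    assert_eq!(candidates.as_slice(), &[3, 5, 8]);
}
```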
fn standard_query(self, reader: &MainReader, query: &str, range: Range<usize>) -> MResult<SortResult> {
let facets_docids = match self.facets_docids(reader)? {
Some(ids) if ids.is_empty() => return Ok(SortResult::default()),
other => other
};
// for each field to retrieve the count for, create a HashMap associating the attribute
// value to a set of matching documents. The HashMaps are then collected in another
// HashMap, associating each HashMap to its field.
let facet_count_docids = self.facet_count_docids(reader)?;

match self.distinct {
Some((distinct, distinct_size)) => bucket_sort_with_distinct(
@@ -152,12 +145,7 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
distinct_size,
self.criteria,
self.searchable_attrs,
self.index.main,
self.index.postings_lists,
self.index.documents_fields_counts,
self.index.synonyms,
self.index.prefix_documents_cache,
self.index.prefix_postings_lists_cache,
self.index,
),
None => bucket_sort(
reader,
@@ -168,15 +156,121 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
self.filter,
self.criteria,
self.searchable_attrs,
self.index.main,
self.index.postings_lists,
self.index.documents_fields_counts,
self.index.synonyms,
self.index.prefix_documents_cache,
self.index.prefix_postings_lists_cache,
self.index,
),
}
}

fn placeholder_query(self, reader: &heed::RoTxn<MainT>, range: Range<usize>) -> MResult<SortResult> {
match self.facets_docids(reader)? {
Some(docids) => {
// We sort the docids from facets according to the criteria set by the user
let mut sorted_docids = docids.clone().into_vec();
let mut sort_result = match self.index.main.ranked_map(reader)? {
Some(ranked_map) => {
placeholder_document_sort(&mut sorted_docids, self.index, reader, &ranked_map)?;
self.sort_result_from_docids(&sorted_docids, range)
},
// if we can't perform a sort, we return documents unordered
None => self.sort_result_from_docids(&docids, range),
};

if let Some(f) = self.facet_count_docids(reader)? {
sort_result.exhaustive_facets_count = Some(true);
sort_result.facets = Some(facet_count(f, &docids));
}

Ok(sort_result)
},
None => {
match self.index.main.sorted_document_ids_cache(reader)? {
// build result from cached document ids
Some(docids) => {
let mut sort_result = self.sort_result_from_docids(&docids, range);

if let Some(f) = self.facet_count_docids(reader)? {
sort_result.exhaustive_facets_count = Some(true);
// document ids are not sorted in natural order, we need to construct a new set
let document_set = SetBuf::from_dirty(Vec::from(docids));
sort_result.facets = Some(facet_count(f, &document_set));
}

Ok(sort_result)
},
// no document id cached, return empty result
None => Ok(SortResult::default()),
}
}
}
}
fn facet_count_docids<'a>(&self, reader: &'a MainReader) -> MResult<Option<HashMap<String, HashMap<String, (&'a str, Cow<'a, Set<DocumentId>>)>>>> {
match self.facets {
Some(ref field_ids) => {
let mut facet_count_map = HashMap::new();
for (field_id, field_name) in field_ids {
let mut key_map = HashMap::new();
for pair in self.index.facets.field_document_ids(reader, *field_id)? {
let (facet_key, document_ids) = pair?;
let value = facet_key.value();
key_map.insert(value.to_string(), document_ids);
}
facet_count_map.insert(field_name.clone(), key_map);
}
Ok(Some(facet_count_map))
}
None => Ok(None),
}
}
fn sort_result_from_docids(&self, docids: &[DocumentId], range: Range<usize>) -> SortResult {
let mut sort_result = SortResult::default();
let mut result = match self.filter {
Some(ref filter) => docids
.iter()
.filter(|item| (filter)(**item))
.skip(range.start)
.take(range.end - range.start)
.map(|&id| Document::from_highlights(id, &[]))
.collect::<Vec<_>>(),
None => docids
.iter()
.skip(range.start)
.take(range.end - range.start)
.map(|&id| Document::from_highlights(id, &[]))
.collect::<Vec<_>>(),
};

// distinct is set, remove duplicates with the distinct function
if let Some((distinct, distinct_size)) = &self.distinct {
let mut distinct_map = DistinctMap::new(*distinct_size);
let mut distinct_map = BufferedDistinctMap::new(&mut distinct_map);
result.retain(|doc| {
let id = doc.id;
let key = (distinct)(id);
match key {
Some(key) => distinct_map.register(key),
None => distinct_map.register_without_key(),
}
});
}

sort_result.documents = result;
sort_result.nb_hits = docids.len();
sort_result
}
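The retain pass above keeps a document only while its distinct key has been seen fewer than `distinct_size` times. A minimal sketch of the same idea with std collections (size 1, so one document per key; `DistinctMap` is replaced by a plain `HashSet`):

```rust
use std::collections::HashSet;

// Keep the first document per distinct key; keyless documents always pass.
fn distinct_retain(docs: Vec<(u32, Option<String>)>) -> Vec<u32> {
    let mut seen = HashSet::new();
    docs.into_iter()
        .filter(|(_, key)| match key {
            Some(k) => seen.insert(k.clone()), // false once the key was seen
            None => true,
        })
        .map(|(id, _)| id)
        .collect()
}

fn main() {
    let docs = vec![
        (1, Some("red".to_string())),
        (2, Some("red".to_string())), // dropped: same distinct key as doc 1
        (3, None),
    ];
    assert_eq!(distinct_retain(docs), vec![1, 3]);
}
```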
pub fn query(
self,
reader: &heed::RoTxn<MainT>,
query: Option<&str>,
range: Range<usize>,
) -> MResult<SortResult> {
match query {
Some(query) => self.standard_query(reader, query, range),
None => self.placeholder_query(reader, range),
}
}
}
#[cfg(test)]
@@ -191,12 +285,12 @@ mod tests {
use sdset::SetBuf;
use tempfile::TempDir;

use crate::DocIndex;
use crate::Document;
use crate::automaton::normalize_str;
use crate::bucket_sort::SimpleMatch;
use crate::database::{Database, DatabaseOptions};
use crate::store::Index;
use crate::DocIndex;
use crate::Document;
use meilisearch_schema::Schema;

fn set_from_stream<'f, I, S>(stream: I) -> fst::Set<Vec<u8>>
@@ -376,7 +470,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "iphone from apple", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("iphone from apple"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -399,7 +493,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "hello", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("hello"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -410,7 +504,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "bonjour", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("bonjour"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -514,7 +608,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "hello", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("hello"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -535,7 +629,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "bonjour", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("bonjour"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -556,7 +650,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "salut", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("salut"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -602,7 +696,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NY subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -624,7 +718,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult { documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NYC subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -666,7 +760,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NY", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NY"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(2), matches, .. }) => {
@@ -690,7 +784,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "new york", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("new york"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -724,7 +818,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NY subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -741,7 +835,8 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "new york subway", 0..20).unwrap();
let SortResult { documents, .. } =
builder.query(&reader, Some("new york subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -789,7 +884,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NY subway", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NY subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -811,7 +906,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NYC subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -864,7 +959,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NY subway broken", 0..20).unwrap();
let SortResult {documents, .. } = builder.query(&reader, Some("NY subway broken"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -880,7 +975,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NYC subway", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NYC subway"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -937,7 +1032,7 @@ mod tests {

let builder = store.query_builder();
let SortResult { documents, .. } = builder
.query(&reader, "new york underground train broken", 0..20)
.query(&reader, Some("new york underground train broken"), 0..20)
.unwrap();
let mut iter = documents.into_iter();

@@ -967,7 +1062,7 @@ mod tests {

let builder = store.query_builder();
let SortResult { documents, .. } = builder
.query(&reader, "new york city underground train broken", 0..20)
.query(&reader, Some("new york city underground train broken"), 0..20)
.unwrap();
let mut iter = documents.into_iter();

@@ -1010,7 +1105,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "new york big ", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("new york big "), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1044,7 +1139,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "NY subway ", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("NY subway "), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1095,7 +1190,7 @@ mod tests {

let builder = store.query_builder();
let SortResult { documents, .. } = builder
.query(&reader, "new york city long subway cool ", 0..20)
.query(&reader, Some("new york city long subway cool "), 0..20)
.unwrap();
let mut iter = documents.into_iter();

@@ -1127,7 +1222,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "telephone", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("telephone"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1144,7 +1239,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "téléphone", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("téléphone"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1161,7 +1256,7 @@ mod tests {
assert_matches!(iter.next(), None);

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "télephone", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("télephone"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(1), matches, .. }) => {
@@ -1188,7 +1283,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "i phone case", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("i phone case"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1217,7 +1312,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("searchengine"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1257,7 +1352,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("searchengine"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -1289,7 +1384,7 @@ mod tests {
let reader = db.main_read_txn().unwrap();

let builder = store.query_builder();
let SortResult {documents, .. } = builder.query(&reader, "searchengine", 0..20).unwrap();
let SortResult { documents, .. } = builder.query(&reader, Some("searchengine"), 0..20).unwrap();
let mut iter = documents.into_iter();

assert_matches!(iter.next(), Some(Document { id: DocumentId(0), matches, .. }) => {
@@ -19,6 +19,7 @@ impl QueryWordsMapper {
QueryWordsMapper { originals, mappings: HashMap::new() }
}

#[allow(clippy::len_zero)]
pub fn declare<I, A>(&mut self, range: Range<usize>, id: QueryId, replacement: I)
where I: IntoIterator<Item = A>,
A: ToString,
@@ -53,7 +54,7 @@ impl QueryWordsMapper {
}

{
let replacement = replacement[common_left..replacement.len() - common_right].iter().cloned().collect();
let replacement = replacement[common_left..replacement.len() - common_right].to_vec();
self.mappings.insert(id + common_left, (range.clone(), replacement));
}
@@ -124,7 +124,7 @@ fn index_token<A>(
) -> bool
where A: AsRef<[u8]>,
{
if token.word_index >= word_limit {
if token.index >= word_limit {
return false;
}

@@ -277,4 +277,36 @@ mod tests {
.get(&"🇯🇵".to_owned().into_bytes())
.is_some());
}

#[test]
// test sample from 807
fn very_long_text() {
let mut indexer = RawIndexer::new(fst::Set::default());
let indexed_pos = IndexedPos(0);
let docid = DocumentId(0);
let text = " The locations block is the most powerful, and potentially most involved, section of the .platform.app.yaml file. It allows you to control how the application container responds to incoming requests at a very fine-grained level. Common patterns also vary between language containers due to the way PHP-FPM handles incoming requests.\nEach entry of the locations block is an absolute URI path (with leading /) and its value includes the configuration directives for how the web server should handle matching requests. That is, if your domain is example.com then '/' means “requests for example.com/”, while '/admin' means “requests for example.com/admin”. If multiple blocks could match an incoming request then the most-specific will apply.\nweb:locations:'/':# Rules for all requests that don't otherwise match....'/sites/default/files':# Rules for any requests that begin with /sites/default/files....The simplest possible locations configuration is one that simply passes all requests on to your application unconditionally:\nweb:locations:'/':passthru:trueThat is, all requests to /* should be forwarded to the process started by web.commands.start above. Note that for PHP containers the passthru key must specify what PHP file the request should be forwarded to, and must also specify a docroot under which the file lives. For example:\nweb:locations:'/':root:'web'passthru:'/app.php'This block will serve requests to / from the web directory in the application, and if a file doesn’t exist on disk then the request will be forwarded to the /app.php script.\nA full list of the possible subkeys for locations is below.\n root: The folder from which to serve static assets for this location relative to the application root. The application root is the directory in which the .platform.app.yaml file is located. Typical values for this property include public or web. Setting it to '' is not recommended, and its behavior may vary depending on the type of application. Absolute paths are not supported.\n passthru: Whether to forward disallowed and missing resources from this location to the application and can be true, false or an absolute URI path (with leading /). The default value is false. For non-PHP applications it will generally be just true or false. In a PHP application this will typically be the front controller such as /index.php or /app.php. This entry works similar to mod_rewrite under Apache. Note: If the value of passthru does not begin with the same value as the location key it is under, the passthru may evaluate to another entry. That may be useful when you want different cache settings for different paths, for instance, but want missing files in all of them to map back to the same front controller. See the example block below.\n index: The files to consider when serving a request for a directory: an array of file names or null. (typically ['index.html']). Note that in order for this to work, access to the static files named must be allowed by the allow or rules keys for this location.\n expires: How long to allow static assets from this location to be cached (this enables the Cache-Control and Expires headers) and can be a time or -1 for no caching (default). Times can be suffixed with “ms” (milliseconds), “s” (seconds), “m” (minutes), “h” (hours), “d” (days), “w” (weeks), “M” (months, 30d) or “y” (years, 365d).\n scripts: Whether to allow loading scripts in that location (true or false). This directive is only meaningful on PHP.\n allow: Whether to allow serving files which don’t match a rule (true or false, default: true).\n headers: Any additional headers to apply to static assets. This section is a mapping of header names to header values. Responses from the application aren’t affected, to avoid overlap with the application’s own ability to include custom headers in the response.\n rules: Specific overrides for a specific location. The key is a PCRE (regular expression) that is matched against the full request path.\n request_buffering: Most application servers do not support chunked requests (e.g. fpm, uwsgi), so Platform.sh enables request_buffering by default to handle them. That default configuration would look like this if it was present in .platform.app.yaml:\nweb:locations:'/':passthru:truerequest_buffering:enabled:truemax_request_size:250mIf the application server can already efficiently handle chunked requests, the request_buffering subkey can be modified to disable it entirely (enabled: false). Additionally, applications that frequently deal with uploads greater than 250MB in size can update the max_request_size key to the application’s needs. Note that modifications to request_buffering will need to be specified at each location where it is desired.\n ";
indexer.index_text(docid, indexed_pos, text);
let Indexed {
words_doc_indexes, ..
} = indexer.build();
assert!(words_doc_indexes.get(&"buffering".to_owned().into_bytes()).is_some());
}

#[test]
fn words_over_index_1000_not_indexed() {
let mut indexer = RawIndexer::new(fst::Set::default());
let indexed_pos = IndexedPos(0);
let docid = DocumentId(0);
let mut text = String::with_capacity(5000);
for _ in 0..1000 {
text.push_str("less ");
}
text.push_str("more");
indexer.index_text(docid, indexed_pos, &text);
let Indexed {
words_doc_indexes, ..
} = indexer.build();
assert!(words_doc_indexes.get(&"less".to_owned().into_bytes()).is_some());
assert!(words_doc_indexes.get(&"more".to_owned().into_bytes()).is_none());
}
}
@@ -10,8 +10,7 @@ use self::RankingRule::*;
pub const DEFAULT_RANKING_RULES: [RankingRule; 6] = [Typo, Words, Proximity, Attribute, WordsPosition, Exactness];

static RANKING_RULE_REGEX: Lazy<regex::Regex> = Lazy::new(|| {
let regex = regex::Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap();
regex
regex::Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap()
});

#[derive(Default, Clone, Serialize, Deserialize)]
@@ -30,8 +29,6 @@ pub struct Settings {
#[serde(default, deserialize_with = "deserialize_some")]
pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub accept_new_fields: Option<Option<bool>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub attributes_for_faceting: Option<Option<Vec<String>>>,
}

@@ -44,11 +41,11 @@ fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
}

impl Settings {
pub fn into_update(&self) -> Result<SettingsUpdate, RankingRuleConversionError> {
pub fn to_update(&self) -> Result<SettingsUpdate, RankingRuleConversionError> {
let settings = self.clone();

let ranking_rules = match settings.ranking_rules {
Some(Some(rules)) => UpdateState::Update(RankingRule::from_iter(rules.iter())?),
Some(Some(rules)) => UpdateState::Update(RankingRule::try_from_iter(rules.iter())?),
Some(None) => UpdateState::Clear,
None => UpdateState::Nothing,
};
@@ -61,7 +58,6 @@ impl Settings {
displayed_attributes: settings.displayed_attributes.into(),
stop_words: settings.stop_words.into(),
synonyms: settings.synonyms.into(),
accept_new_fields: settings.accept_new_fields.into(),
attributes_for_faceting: settings.attributes_for_faceting.into(),
})
}
@@ -152,7 +148,7 @@ impl RankingRule {
}
}

pub fn from_iter(rules: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
pub fn try_from_iter(rules: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
rules.into_iter()
.map(|s| RankingRule::from_str(s.as_ref()))
.collect()
@@ -168,7 +164,6 @@ pub struct SettingsUpdate {
pub displayed_attributes: UpdateState<HashSet<String>>,
pub stop_words: UpdateState<BTreeSet<String>>,
pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
pub accept_new_fields: UpdateState<bool>,
pub attributes_for_faceting: UpdateState<Vec<String>>,
}

@@ -182,7 +177,6 @@ impl Default for SettingsUpdate {
displayed_attributes: UpdateState::Nothing,
stop_words: UpdateState::Nothing,
synonyms: UpdateState::Nothing,
accept_new_fields: UpdateState::Nothing,
attributes_for_faceting: UpdateState::Nothing,
}
}
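For reference, the regex only recognizes custom ranking rules of the form `asc(field)` / `desc(field)`. A standalone sketch of how it splits such a rule, assuming the `Lazy` in the diff is `once_cell::sync::Lazy` and using the `regex` crate:

```rust
use once_cell::sync::Lazy;
use regex::Regex;

static RANKING_RULE_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"(asc|desc)\(([a-zA-Z0-9-_]*)\)").unwrap());

fn main() {
    // A custom rule like `desc(release_date)` splits into a direction
    // (capture 1) and a field name (capture 2).
    let caps = RANKING_RULE_REGEX.captures("desc(release_date)").unwrap();
    assert_eq!(&caps[1], "desc");
    assert_eq!(&caps[2], "release_date");
}
```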
@@ -1,12 +1,14 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::mem;

use heed::{RwTxn, RoTxn, Result as ZResult, RoRange};
use heed::{RwTxn, RoTxn, RoRange, types::Str, BytesEncode, BytesDecode};
use sdset::{SetBuf, Set, SetOperation};

use meilisearch_types::DocumentId;
use meilisearch_schema::FieldId;

use crate::MResult;
use crate::database::MainT;
use crate::facets::FacetKey;
use super::cow_set::CowSet;
@@ -14,45 +16,82 @@ use super::cow_set::CowSet;
/// contains facet info
#[derive(Clone, Copy)]
pub struct Facets {
pub(crate) facets: heed::Database<FacetKey, CowSet<DocumentId>>,
pub(crate) facets: heed::Database<FacetKey, FacetData>,
}

pub struct FacetData;

impl<'a> BytesEncode<'a> for FacetData {
type EItem = (&'a str, &'a Set<DocumentId>);

fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<'a, [u8]>> {
// get size of the first item
let first_size = item.0.as_bytes().len();
let size = mem::size_of::<u64>()
+ first_size
+ item.1.len() * mem::size_of::<DocumentId>();
let mut buffer = Vec::with_capacity(size);
// encode the length of the first item
buffer.extend_from_slice(&first_size.to_be_bytes());
buffer.extend_from_slice(Str::bytes_encode(&item.0)?.as_ref());
let second_slice = CowSet::bytes_encode(&item.1)?;
buffer.extend_from_slice(second_slice.as_ref());
Some(Cow::Owned(buffer))
}
}

impl<'a> BytesDecode<'a> for FacetData {
type DItem = (&'a str, Cow<'a, Set<DocumentId>>);

fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
const LEN: usize = mem::size_of::<u64>();
let mut size_buf = [0; LEN];
size_buf.copy_from_slice(bytes.get(0..LEN)?);
// decode size of the first item from the bytes
let first_size = usize::from_be_bytes(size_buf);
// decode first and second items
let first_item = Str::bytes_decode(bytes.get(LEN..(LEN + first_size))?)?;
let second_item = CowSet::bytes_decode(bytes.get((LEN + first_size)..)?)?;
Some((first_item, second_item))
}
}

impl Facets {
// we use sdset::SetBuf to ensure the docids are sorted.
pub fn put_facet_document_ids(&self, writer: &mut RwTxn<MainT>, facet_key: FacetKey, doc_ids: &Set<DocumentId>) -> ZResult<()> {
self.facets.put(writer, &facet_key, doc_ids)
pub fn put_facet_document_ids(&self, writer: &mut RwTxn<MainT>, facet_key: FacetKey, doc_ids: &Set<DocumentId>, facet_value: &str) -> MResult<()> {
Ok(self.facets.put(writer, &facet_key, &(facet_value, doc_ids))?)
}

pub fn field_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, field_id: FieldId) -> ZResult<RoRange<'txn, FacetKey, CowSet<DocumentId>>> {
self.facets.prefix_iter(reader, &FacetKey::new(field_id, String::new()))
pub fn field_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, field_id: FieldId) -> MResult<RoRange<'txn, FacetKey, FacetData>> {
Ok(self.facets.prefix_iter(reader, &FacetKey::new(field_id, String::new()))?)
}

pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> ZResult<Option<Cow<'txn, Set<DocumentId>>>> {
self.facets.get(reader, &facet_key)
pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> MResult<Option<(&'txn str, Cow<'txn, Set<DocumentId>>)>> {
Ok(self.facets.get(reader, &facet_key)?)
}

/// updates the facets store, removing the documents from the facets provided in the
/// `facet_map` argument
pub fn remove(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
for (key, document_ids) in facet_map {
if let Some(old) = self.facets.get(writer, &key)? {
pub fn remove(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, (String, Vec<DocumentId>)>) -> MResult<()> {
for (key, (name, document_ids)) in facet_map {
if let Some((_, old)) = self.facets.get(writer, &key)? {
let to_remove = SetBuf::from_dirty(document_ids);
let new = sdset::duo::OpBuilder::new(old.as_ref(), to_remove.as_set()).difference().into_set_buf();
self.facets.put(writer, &key, new.as_set())?;
self.facets.put(writer, &key, &(&name, new.as_set()))?;
}
}
Ok(())
}

pub fn add(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
for (key, document_ids) in facet_map {
pub fn add(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, (String, Vec<DocumentId>)>) -> MResult<()> {
for (key, (facet_name, document_ids)) in facet_map {
let set = SetBuf::from_dirty(document_ids);
self.put_facet_document_ids(writer, key, set.as_set())?;
self.put_facet_document_ids(writer, key, set.as_set(), &facet_name)?;
}
Ok(())
}

pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
self.facets.clear(writer)
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
Ok(self.facets.clear(writer)?)
}
}
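The `FacetData` codec is a simple length-prefixed concatenation: a big-endian `u64` holding the string's byte length, the UTF-8 facet string, then the document id set. A freestanding sketch of that layout, with `u32` ids as a stand-in for `DocumentId` (the real codec delegates the id part to `CowSet`, so the id byte layout here is an assumption for illustration):

```rust
use std::convert::TryInto;

// Layout: [len(value) as u64 BE][value bytes][id bytes...]
fn encode(value: &str, ids: &[u32]) -> Vec<u8> {
    let mut buf = Vec::new();
    buf.extend_from_slice(&(value.len() as u64).to_be_bytes());
    buf.extend_from_slice(value.as_bytes());
    for id in ids {
        buf.extend_from_slice(&id.to_be_bytes());
    }
    buf
}

fn decode(bytes: &[u8]) -> Option<(&str, Vec<u32>)> {
    const LEN: usize = std::mem::size_of::<u64>();
    // Read the length prefix, then split off the string and the ids.
    let size = u64::from_be_bytes(bytes.get(..LEN)?.try_into().ok()?) as usize;
    let value = std::str::from_utf8(bytes.get(LEN..LEN + size)?).ok()?;
    let ids = bytes
        .get(LEN + size..)?
        .chunks_exact(4)
        .map(|c| u32::from_be_bytes(c.try_into().unwrap()))
        .collect();
    Some((value, ids))
}

fn main() {
    let bytes = encode("Sci-Fi", &[1, 2, 3]);
    assert_eq!(decode(&bytes), Some(("Sci-Fi", vec![1, 2, 3])));
}
```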
@@ -2,7 +2,7 @@ use std::borrow::Cow;
use std::collections::HashMap;

use chrono::{DateTime, Utc};
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str, CowSlice};
use meilisearch_schema::{FieldId, Schema};
use meilisearch_types::DocumentId;
use sdset::Set;
@@ -25,6 +25,7 @@ const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents";
const RANKED_MAP_KEY: &str = "ranked-map";
const RANKING_RULES_KEY: &str = "ranking-rules";
const SCHEMA_KEY: &str = "schema";
const SORTED_DOCUMENT_IDS_CACHE_KEY: &str = "sorted-document-ids-cache";
const STOP_WORDS_KEY: &str = "stop-words";
const SYNONYMS_KEY: &str = "synonyms";
const UPDATED_AT_KEY: &str = "updated-at";
@@ -165,6 +166,14 @@ impl Main {
Ok(self.main.put::<_, Str, ByteSlice>(writer, WORDS_KEY, fst.as_fst().as_bytes())?)
}

pub fn put_sorted_document_ids_cache(self, writer: &mut heed::RwTxn<MainT>, documents_ids: &[DocumentId]) -> MResult<()> {
Ok(self.main.put::<_, Str, CowSlice<DocumentId>>(writer, SORTED_DOCUMENT_IDS_CACHE_KEY, documents_ids)?)
}

pub fn sorted_document_ids_cache(self, reader: &heed::RoTxn<MainT>) -> MResult<Option<Cow<[DocumentId]>>> {
Ok(self.main.get::<_, Str, CowSlice<DocumentId>>(reader, SORTED_DOCUMENT_IDS_CACHE_KEY)?)
}

pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> MResult<()> {
Ok(self.main.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)?)
}
@@ -287,10 +296,10 @@ impl Main {
}

pub fn distinct_attribute(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<FieldId>> {
if let Some(value) = self.main.get::<_, Str, OwnedType<u16>>(reader, DISTINCT_ATTRIBUTE_KEY)? {
return Ok(Some(FieldId(value.to_owned())))
match self.main.get::<_, Str, OwnedType<u16>>(reader, DISTINCT_ATTRIBUTE_KEY)? {
Some(value) => Ok(Some(FieldId(value.to_owned()))),
None => Ok(None),
}
return Ok(None)
}

pub fn put_distinct_attribute(self, writer: &mut heed::RwTxn<MainT>, value: FieldId) -> MResult<()> {
@@ -109,6 +109,7 @@ pub fn push_documents_addition<D: serde::Serialize>(
Ok(last_update_id)
}

#[allow(clippy::too_many_arguments)]
fn index_document<A>(
writer: &mut heed::RwTxn<MainT>,
documents_fields: DocumentsFields,
@@ -170,15 +171,23 @@ pub fn apply_addition<'a, 'b>(
let mut new_internal_docids = Vec::with_capacity(new_documents.len());

for mut document in new_documents {
let external_docids_get = |docid: &str| {
match (external_docids.get(docid), new_external_docids.get(docid)) {
(_, Some(&id))
| (Some(id), _) => Some(id as u32),
(None, None) => None,
}
};

let (internal_docid, external_docid) =
extract_document_id(
&primary_key,
&document,
&external_docids,
&external_docids_get,
&mut available_ids,
)?;

new_external_docids.insert(external_docid, internal_docid.0);
new_external_docids.insert(external_docid, internal_docid.0 as u64);
new_internal_docids.push(internal_docid);

if partial {
@@ -216,7 +225,7 @@ pub fn apply_addition<'a, 'b>(
let mut indexer = RawIndexer::new(stop_words);

// For each document in this update
for (document_id, document) in documents_additions {
for (document_id, document) in &documents_additions {
// For each key-value pair in the document.
for (attribute, value) in document {
let field_id = schema.insert_and_index(&attribute)?;
@@ -228,7 +237,7 @@ pub fn apply_addition<'a, 'b>(
&mut indexer,
&schema,
field_id,
document_id,
*document_id,
&value,
)?;
}
@@ -256,6 +265,10 @@ pub fn apply_addition<'a, 'b>(
index.facets.add(writer, facet_map)?;
}

// update is finished; update sorted document id cache with new state
let mut document_ids = index.main.internal_docids(writer)?.to_vec();
super::cache_document_ids_sorted(writer, &ranked_map, index, &mut document_ids)?;

Ok(())
}

@@ -312,8 +325,8 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
index.facets.add(writer, facet_map)?;
}
// ^-- https://github.com/meilisearch/MeiliSearch/pull/631#issuecomment-626624470 --v
for document_id in documents_ids_to_reindex {
for result in index.documents_fields.document_fields(writer, document_id)? {
for document_id in &documents_ids_to_reindex {
for result in index.documents_fields.document_fields(writer, *document_id)? {
let (field_id, bytes) = result?;
let value: Value = serde_json::from_slice(bytes)?;
ram_store.insert((document_id, field_id), value);
@@ -329,7 +342,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
&mut indexer,
&schema,
field_id,
document_id,
*document_id,
&value,
)?;
}
@@ -353,6 +366,10 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
index.facets.add(writer, facet_map)?;
}

// update is finished; update sorted document id cache with new state
let mut document_ids = index.main.internal_docids(writer)?.to_vec();
super::cache_document_ids_sorted(writer, &ranked_map, index, &mut document_ids)?;

Ok(())
}
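The `external_docids_get` closure gives the ids inserted earlier in this same batch priority over the ids already stored in the index. A minimal sketch of that two-level lookup, with `HashMap`s standing in for the on-disk fst map:

```rust
use std::collections::HashMap;

fn main() {
    // Ids already stored on disk, and ids added earlier in this update.
    let external_docids: HashMap<&str, u64> = [("doc-a", 1)].into_iter().collect();
    let mut new_external_docids: HashMap<&str, u64> = HashMap::new();
    new_external_docids.insert("doc-a", 7);

    // The freshly inserted id shadows the stored one.
    let external_docids_get = |docid: &str| {
        match (external_docids.get(docid), new_external_docids.get(docid)) {
            (_, Some(&id)) | (Some(&id), _) => Some(id as u32),
            (None, None) => None,
        }
    };

    assert_eq!(external_docids_get("doc-a"), Some(7));
    assert_eq!(external_docids_get("doc-b"), None);
}
```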
@@ -8,7 +8,7 @@ use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::facets;
use crate::store;
use crate::update::{next_update_id, compute_short_prefixes, Update};
use crate::{DocumentId, Error, MResult, RankedMap};
use crate::{DocumentId, Error, MResult, RankedMap, MainWriter, Index};

pub struct DocumentsDeletion {
updates_store: store::Updates,
@@ -153,8 +153,8 @@ pub fn apply_documents_deletion(
}

let deleted_documents_len = deleted_documents.len() as u64;
for id in deleted_documents {
index.docs_words.del_doc_words(writer, id)?;
for id in &deleted_documents {
index.docs_words.del_doc_words(writer, *id)?;
}

let removed_words = fst::Set::from_iter(removed_words).unwrap();
@@ -180,5 +180,28 @@ pub fn apply_documents_deletion(

compute_short_prefixes(writer, &words, index)?;

// update is finished; update sorted document id cache with new state
document_cache_remove_deleted(writer, index, &ranked_map, &deleted_documents)?;

Ok(())
}

/// rebuilds the document id cache by either removing deleted documents from the existing cache,
/// or generating a new one from the documents in the store
fn document_cache_remove_deleted(writer: &mut MainWriter, index: &Index, ranked_map: &RankedMap, documents_to_delete: &HashSet<DocumentId>) -> MResult<()> {
let new_cache = match index.main.sorted_document_ids_cache(writer)? {
// only keep documents that are not in the list of deleted documents. Order is preserved,
// no need to resort
Some(old_cache) => {
old_cache.iter().filter(|docid| !documents_to_delete.contains(docid)).cloned().collect::<Vec<_>>()
}
// couldn't find cached documents, try building a new cache from documents in store
None => {
let mut document_ids = index.main.internal_docids(writer)?.to_vec();
super::cache_document_ids_sorted(writer, ranked_map, index, &mut document_ids)?;
document_ids
}
};
index.main.put_sorted_document_ids_cache(writer, &new_cache)?;
Ok(())
}
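Because the cached ids are already in ranked order, deletion only needs a stable filter pass, not a re-sort. A tiny illustration of that invariant:

```rust
use std::collections::HashSet;

fn main() {
    // Cached ids, already sorted by the ranking criteria (not by value).
    let old_cache = vec![10u32, 4, 9, 12];
    let deleted: HashSet<u32> = [4, 12].into_iter().collect();

    // Relative order of the survivors is preserved, so no re-sort is needed.
    let new_cache: Vec<u32> = old_cache
        .into_iter()
        .filter(|id| !deleted.contains(id))
        .collect();
    assert_eq!(new_cache, vec![10, 9]);
}
```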
@@ -6,7 +6,7 @@ use meilisearch_types::DocumentId;
use ordered_float::OrderedFloat;
use serde_json::Value;

use crate::{Number, FstMapCow};
use crate::Number;
use crate::raw_indexer::RawIndexer;
use crate::serde::SerializerError;
use crate::store::DiscoverIds;
@@ -98,15 +98,17 @@ pub fn value_to_number(value: &Value) -> Option<Number> {

/// Validates a string representation to be a correct document id and returns
/// the corresponding id or generates a new one; this is the way we produce document ids.
pub fn discover_document_id(
pub fn discover_document_id<F>(
docid: &str,
external_docids: &FstMapCow,
external_docids_get: F,
available_docids: &mut DiscoverIds<'_>,
) -> Result<DocumentId, SerializerError>
where
F: FnOnce(&str) -> Option<u32>
{
if docid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') {
match external_docids.get(docid) {
Some(id) => Ok(DocumentId(id as u32)),
match external_docids_get(docid) {
Some(id) => Ok(DocumentId(id)),
None => {
let internal_id = available_docids.next().expect("no more ids available");
Ok(internal_id)
@@ -118,12 +120,14 @@ pub fn discover_document_id(
}

/// Extracts and validates the document id of a document.
pub fn extract_document_id(
pub fn extract_document_id<F>(
primary_key: &str,
document: &IndexMap<String, Value>,
external_docids: &FstMapCow,
external_docids_get: F,
available_docids: &mut DiscoverIds<'_>,
) -> Result<(DocumentId, String), SerializerError>
where
F: FnOnce(&str) -> Option<u32>
{
match document.get(primary_key) {
Some(value) => {
@@ -132,7 +136,7 @@ pub fn extract_document_id(
Value::String(string) => string.clone(),
_ => return Err(SerializerError::InvalidDocumentIdFormat),
};
discover_document_id(&docid, external_docids, available_docids).map(|id| (id, docid))
discover_document_id(&docid, external_docids_get, available_docids).map(|id| (id, docid))
}
None => Err(SerializerError::DocumentIdNotFound),
}
@@ -24,7 +24,10 @@ use sdset::Set;
use serde::{Deserialize, Serialize};
use serde_json::Value;

use crate::{store, MResult};
use meilisearch_error::ErrorCode;
use meilisearch_types::DocumentId;

use crate::{store, MResult, RankedMap};
use crate::database::{MainT, UpdateT};
use crate::settings::SettingsUpdate;

@@ -72,7 +75,7 @@ impl Update {

fn settings(data: SettingsUpdate) -> Update {
Update {
data: UpdateData::Settings(data),
data: UpdateData::Settings(Box::new(data)),
enqueued_at: Utc::now(),
}
}
@@ -85,7 +88,7 @@ pub enum UpdateData {
DocumentsAddition(Vec<IndexMap<String, Value>>),
DocumentsPartial(Vec<IndexMap<String, Value>>),
DocumentsDeletion(Vec<String>),
Settings(SettingsUpdate)
Settings(Box<SettingsUpdate>)
}

impl UpdateData {
@@ -117,7 +120,7 @@ pub enum UpdateType {
DocumentsAddition { number: usize },
DocumentsPartial { number: usize },
DocumentsDeletion { number: usize },
Settings { settings: SettingsUpdate },
Settings { settings: Box<SettingsUpdate> },
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -128,6 +131,12 @@ pub struct ProcessedUpdateResult {
pub update_type: UpdateType,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error_type: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error_code: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error_link: Option<String>,
pub duration: f64, // in seconds
pub enqueued_at: DateTime<Utc>,
pub processed_at: DateTime<Utc>,
@@ -273,7 +282,7 @@ pub fn update_task<'a, 'b>(
let result = apply_settings_update(
writer,
index,
settings,
*settings,
);

(update_type, result, start.elapsed())
@@ -288,7 +297,10 @@ pub fn update_task<'a, 'b>(
let status = ProcessedUpdateResult {
update_id,
update_type,
error: result.map_err(|e| e.to_string()).err(),
error: result.as_ref().map_err(|e| e.to_string()).err(),
error_code: result.as_ref().map_err(|e| e.error_name()).err(),
error_type: result.as_ref().map_err(|e| e.error_type()).err(),
error_link: result.as_ref().map_err(|e| e.error_url()).err(),
duration: duration.as_secs_f64(),
enqueued_at,
processed_at: Utc::now(),
@@ -360,3 +372,13 @@ where A: AsRef<[u8]>,

Ok(())
}

fn cache_document_ids_sorted(
writer: &mut heed::RwTxn<MainT>,
ranked_map: &RankedMap,
index: &store::Index,
document_ids: &mut [DocumentId],
) -> MResult<()> {
crate::bucket_sort::placeholder_document_sort(document_ids, index, writer, ranked_map)?;
index.main.put_sorted_document_ids_cache(writer, &document_ids)
}
@@ -68,19 +68,13 @@ pub fn apply_settings_update(
UpdateState::Nothing => (),
}

match settings.accept_new_fields {
UpdateState::Update(v) => {
schema.set_accept_new_fields(v);
},
UpdateState::Clear => {
schema.set_accept_new_fields(true);
},
UpdateState::Nothing => (),
}

match settings.searchable_attributes.clone() {
UpdateState::Update(v) => {
if v.iter().any(|e| e == "*") || v.is_empty() {
schema.set_all_fields_as_indexed();
} else {
schema.update_indexed(v)?;
}
must_reindex = true;
},
UpdateState::Clear => {
@@ -90,7 +84,13 @@ pub fn apply_settings_update(
UpdateState::Nothing => (),
}
match settings.displayed_attributes.clone() {
UpdateState::Update(v) => schema.update_displayed(v)?,
UpdateState::Update(v) => {
if v.contains("*") || v.is_empty() {
schema.set_all_fields_as_displayed();
} else {
schema.update_displayed(v)?
}
},
UpdateState::Clear => {
schema.set_all_fields_as_displayed();
},
@@ -1,6 +1,6 @@
[package]
name = "meilisearch-error"
version = "0.11.1"
version = "0.14.1"
authors = ["marin <postma.marin@protonmail.com>"]
edition = "2018"
@ -26,6 +26,7 @@ pub trait ErrorCode: std::error::Error {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)]
|
||||
enum ErrorType {
|
||||
InternalError,
|
||||
InvalidRequestError,
|
||||
@@ -85,33 +86,40 @@ impl Code {

         match self {
             // index related errors
-            CreateIndex => ErrCode::invalid("create_index", StatusCode::BAD_REQUEST),
+            // create index is thrown on internal error while creating an index.
+            CreateIndex => ErrCode::internal("index_creation_failed", StatusCode::BAD_REQUEST),
             IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::BAD_REQUEST),
-            IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
-            InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
-            OpenIndex => ErrCode::internal("open_index", StatusCode::INTERNAL_SERVER_ERROR),
+            // thrown when requesting an unexisting index
+            IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
+            InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
+            OpenIndex => ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR),

             // invalid state error
             InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
-            MissingPrimaryKey => ErrCode::internal("missing_primary_key", StatusCode::INTERNAL_SERVER_ERROR),
-            PrimaryKeyAlreadyPresent => ErrCode::internal("primary_key_already_present", StatusCode::INTERNAL_SERVER_ERROR),
+            // thrown when no primary key has been set
+            MissingPrimaryKey => ErrCode::invalid("missing_primary_key", StatusCode::BAD_REQUEST),
+            // error thrown when trying to set an already existing primary key
+            PrimaryKeyAlreadyPresent => ErrCode::invalid("primary_key_already_present", StatusCode::BAD_REQUEST),

             // invalid document
-            MaxFieldsLimitExceeded => ErrCode::invalid("max_field_limit_exceeded", StatusCode::BAD_REQUEST),
+            MaxFieldsLimitExceeded => ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST),
             MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),

-            // error related to facets
-            Facet => ErrCode::invalid("invalid_facet", StatusCode::BAD_REQUEST),
+            // error related to filters
+            Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),

             BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
             BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
-            DocumentNotFound => ErrCode::internal("document_not_found", StatusCode::NOT_FOUND),
+            DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
             Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
             InvalidToken => ErrCode::authentication("invalid_token", StatusCode::FORBIDDEN),
             Maintenance => ErrCode::internal("maintenance", StatusCode::SERVICE_UNAVAILABLE),
             MissingAuthorizationHeader => ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED),
             NotFound => ErrCode::invalid("not_found", StatusCode::NOT_FOUND),
             PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
-            RetrieveDocument => ErrCode::internal("retrieve_document", StatusCode::BAD_REQUEST),
+            RetrieveDocument => ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST),
             SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
             UnsupportedMediaType => ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE),
         }
@@ -134,7 +142,7 @@ impl Code {

     /// return the doc url ascociated with the error
     fn url(&self) -> String {
-        format!("https://docs.meilisearch.com/error/{}", self.name())
+        format!("https://docs.meilisearch.com/errors#{}", self.name())
     }
 }
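Aside: a minimal standalone sketch (not part of the diff) of what the URL change above produces; the two format strings are taken verbatim from the hunk, the `error_url` function itself is illustrative only.

    fn error_url(name: &str) -> String {
        // old scheme: format!("https://docs.meilisearch.com/error/{}", name)
        // new scheme: a single /errors page with one anchor per error name
        format!("https://docs.meilisearch.com/errors#{}", name)
    }

    fn main() {
        assert_eq!(
            error_url("index_not_found"),
            "https://docs.meilisearch.com/errors#index_not_found"
        );
    }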
@@ -1,7 +1,7 @@
 [package]
 name = "meilisearch-http"
 description = "MeiliSearch HTTP server"
-version = "0.11.1"
+version = "0.14.1"
 license = "MIT"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
@@ -27,17 +27,17 @@ bytes = "0.5.4"
 chrono = { version = "0.4.11", features = ["serde"] }
 crossbeam-channel = "0.4.2"
 env_logger = "0.7.1"
+flate2 = "1.0.16"
 futures = "0.3.4"
 http = "0.1.19"
 indexmap = { version = "1.3.2", features = ["serde-1"] }
 log = "0.4.8"
 main_error = "0.1.0"
-meilisearch-core = { path = "../meilisearch-core", version = "0.11.1" }
-meilisearch-error = { path = "../meilisearch-error", version = "0.11.1" }
-meilisearch-schema = { path = "../meilisearch-schema", version = "0.11.1" }
-meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.11.1"}
+meilisearch-core = { path = "../meilisearch-core", version = "0.14.1" }
+meilisearch-error = { path = "../meilisearch-error", version = "0.14.1" }
+meilisearch-schema = { path = "../meilisearch-schema", version = "0.14.1" }
+meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.14.1"}
 mime = "0.3.16"
 pretty-bytes = "0.2.2"
 rand = "0.7.3"
 regex = "1.3.6"
 rustls = "0.16.0"
@@ -48,7 +48,8 @@ sha2 = "0.8.1"
 siphasher = "0.3.2"
 slice-group-by = "0.2.6"
 structopt = "0.3.12"
 sysinfo = "0.14.5"
+tar = "0.4.29"
 tempfile = "3.1.0"
 tokio = { version = "0.2.18", features = ["macros"] }
 ureq = { version = "0.12.0", features = ["tls"], default-features = false }
 walkdir = "2.3.1"
@@ -70,9 +71,9 @@ features = [
 optional = true

 [dev-dependencies]
+serde_url_params = "0.2.0"
 tempdir = "0.3.7"
 tokio = { version = "0.2.18", features = ["macros", "time"] }
-serde_url_params = "0.2.0"

 [dev-dependencies.assert-json-diff]
 git = "https://github.com/qdequele/assert-json-diff"
@@ -136,13 +136,13 @@
           <div class="level-item has-text-centered">
             <div>
               <p class="heading">Documents</p>
-              <p id="count" class="title">25</p>
+              <p id="count" class="title">0</p>
             </div>
           </div>
           <div class="level-item has-text-centered">
             <div>
               <p class="heading">Time Spent</p>
-              <p id="time" class="title">4ms</p>
+              <p id="time" class="title">N/A</p>
             </div>
           </div>
         </nav>
@@ -221,7 +221,7 @@
       results.innerHTML = '';

       let processingTimeMs = httpResults.processingTimeMs;
-      let numberOfDocuments = httpResults.hits.length;
+      let numberOfDocuments = httpResults.nbHits;
       time.innerHTML = `${processingTimeMs}ms`;
       count.innerHTML = `${numberOfDocuments}`;
@@ -299,6 +299,8 @@
     refreshIndexList();

     search.oninput = triggerSearch;

+    let select = document.getElementById("index");
+    select.onchange = triggerSearch;

     triggerSearch();
@@ -1,9 +1,9 @@
+use std::error::Error;
 use std::ops::Deref;
 use std::sync::Arc;

 use meilisearch_core::{Database, DatabaseOptions};
 use sha2::Digest;
-use sysinfo::Pid;

 use crate::index_update_callback;
 use crate::option::Opt;
@@ -26,7 +26,7 @@ pub struct DataInner {
     pub db: Arc<Database>,
     pub db_path: String,
     pub api_keys: ApiKeys,
-    pub server_pid: Pid,
+    pub server_pid: u32,
     pub http_payload_size_limit: usize,
 }
@@ -55,18 +55,18 @@ impl ApiKeys {
 }

 impl Data {
-    pub fn new(opt: Opt) -> Data {
+    pub fn new(opt: Opt) -> Result<Data, Box<dyn Error>> {
         let db_path = opt.db_path.clone();
-        let server_pid = sysinfo::get_current_pid().unwrap();
+        let server_pid = std::process::id();

         let db_opt = DatabaseOptions {
-            main_map_size: opt.main_map_size,
-            update_map_size: opt.update_map_size,
+            main_map_size: opt.max_mdb_size,
+            update_map_size: opt.max_udb_size,
         };

         let http_payload_size_limit = opt.http_payload_size_limit;

-        let db = Arc::new(Database::open_or_create(opt.db_path, db_opt).unwrap());
+        let db = Arc::new(Database::open_or_create(opt.db_path, db_opt)?);

         let mut api_keys = ApiKeys {
             master: opt.master_key,

@@ -93,6 +93,6 @@ impl Data {
             index_update_callback(&index_uid, &callback_context, status);
         }));

-        data
+        Ok(data)
     }
 }
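Aside: a minimal sketch of the fallible-constructor pattern the hunk above adopts; `Database` and the path here are stand-ins, not the real meilisearch_core types.

    use std::error::Error;

    struct Database;

    impl Database {
        // pretend this can fail, like Database::open_or_create in the diff
        fn open_or_create(_path: &str) -> Result<Database, Box<dyn Error>> {
            Ok(Database)
        }
    }

    struct Data {
        db: Database,
    }

    impl Data {
        fn new(path: &str) -> Result<Data, Box<dyn Error>> {
            let db = Database::open_or_create(path)?; // was `.unwrap()` before
            Ok(Data { db })
        }
    }

    fn main() -> Result<(), Box<dyn Error>> {
        let _data = Data::new("./data.ms")?; // the caller now decides how to handle failure
        Ok(())
    }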
@@ -114,10 +114,10 @@ impl fmt::Display for FacetCountError {
         use FacetCountError::*;

         match self {
-            AttributeNotSet(attr) => write!(f, "attribute {} is not set as facet", attr),
-            SyntaxError(msg) => write!(f, "syntax error: {}", msg),
-            UnexpectedToken { expected, found } => write!(f, "unexpected {} found, expected {:?}", found, expected),
-            NoFacetSet => write!(f, "can't perform facet count, as no facet is set"),
+            AttributeNotSet(attr) => write!(f, "Attribute {} is not set as facet", attr),
+            SyntaxError(msg) => write!(f, "Syntax error: {}", msg),
+            UnexpectedToken { expected, found } => write!(f, "Unexpected {} found, expected {:?}", found, expected),
+            NoFacetSet => write!(f, "Can't perform facet count, as no facet is set"),
         }
     }
 }
@@ -195,9 +195,9 @@ impl fmt::Display for Error {
             Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"),
             Self::NotFound(err) => write!(f, "{} not found", err),
             Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
-            Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
-            Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
-            Self::PayloadTooLarge => f.write_str("Payload to large"),
+            Self::RetrieveDocument(id, err) => write!(f, "Impossible to retrieve the document with id: {}; {}", id, err),
+            Self::SearchDocuments(err) => write!(f, "Impossible to search documents; {}", err),
+            Self::PayloadTooLarge => f.write_str("Payload too large"),
             Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
         }
     }
@@ -236,6 +236,18 @@ impl From<actix_http::Error> for Error {
     }
 }

+impl From<std::io::Error> for Error {
+    fn from(err: std::io::Error) -> Error {
+        Error::Internal(err.to_string())
+    }
+}
+
+impl From<meilisearch_core::Error> for Error {
+    fn from(err: meilisearch_core::Error) -> Error {
+        Error::Internal(err.to_string())
+    }
+}
+
 impl From<FacetCountError> for ResponseError {
     fn from(err: FacetCountError) -> ResponseError {
         ResponseError { inner: Box::new(err) }
@@ -43,6 +43,7 @@ pub struct LoggingMiddleware<S> {
     service: Rc<RefCell<S>>,
 }

+#[allow(clippy::type_complexity)]
 impl<S, B> Service for LoggingMiddleware<S>
 where
     S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = actix_web::Error> + 'static,
@@ -20,11 +20,11 @@ use slice_group_by::GroupBy;
 use crate::error::{Error, ResponseError};

 pub trait IndexSearchExt {
-    fn new_search(&self, query: String) -> SearchBuilder;
+    fn new_search(&self, query: Option<String>) -> SearchBuilder;
 }

 impl IndexSearchExt for Index {
-    fn new_search(&self, query: String) -> SearchBuilder {
+    fn new_search(&self, query: Option<String>) -> SearchBuilder {
         SearchBuilder {
             index: self,
             query,
@@ -43,7 +43,7 @@ impl IndexSearchExt for Index {

 pub struct SearchBuilder<'a> {
     index: &'a Index,
-    query: String,
+    query: Option<String>,
     offset: usize,
     limit: usize,
     attributes_to_crop: Option<HashMap<String, usize>>,
@@ -156,7 +156,7 @@ impl<'a> SearchBuilder<'a> {
         query_builder.set_facets(self.facets);

         let start = Instant::now();
-        let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+        let result = query_builder.query(reader, self.query.as_deref(), self.offset..(self.offset + self.limit));
         let search_result = result.map_err(Error::search_documents)?;
         let time_ms = start.elapsed().as_millis() as usize;
@@ -245,7 +245,7 @@ impl<'a> SearchBuilder<'a> {
             nb_hits: search_result.nb_hits,
             exhaustive_nb_hits: search_result.exhaustive_nb_hit,
             processing_time_ms: time_ms,
-            query: self.query.to_string(),
+            query: self.query.unwrap_or_default(),
             facets_distribution: search_result.facets,
             exhaustive_facets_count: search_result.exhaustive_facets_count,
         };
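Aside: a minimal sketch of the `Option<String>` query handling the two hunks above introduce; `handle_query` is a hypothetical stand-in, but `as_deref` and `unwrap_or_default` are the exact calls used in the diff.

    fn handle_query(query: Option<String>) -> String {
        // what query_builder.query() now receives: an Option<&str>
        let engine_side: Option<&str> = query.as_deref();
        match engine_side {
            Some(q) => println!("searching for {:?}", q),
            None => println!("placeholder search, no query string"),
        }
        // what the response `query` field now carries: "" when no q was sent
        query.unwrap_or_default()
    }

    fn main() {
        assert_eq!(handle_query(None), "");
        assert_eq!(handle_query(Some("hello".to_string())), "hello");
    }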
@@ -7,6 +7,7 @@ pub mod models;
 pub mod option;
 pub mod routes;
 pub mod analytics;
+pub mod snapshot;

 use actix_http::Error;
 use actix_service::ServiceFactory;
@@ -4,8 +4,9 @@ use actix_cors::Cors;
 use actix_web::{middleware, HttpServer};
 use main_error::MainError;
 use meilisearch_http::helpers::NormalizePath;
-use meilisearch_http::{Data, Opt, create_app, index_update_callback};
+use meilisearch_http::{create_app, index_update_callback, Data, Opt};
 use structopt::StructOpt;
+use meilisearch_http::snapshot;

 mod analytics;
@@ -19,7 +20,11 @@ async fn main() -> Result<(), MainError> {

     #[cfg(all(not(debug_assertions), feature = "sentry"))]
     let _sentry = sentry::init((
-        "https://5ddfa22b95f241198be2271aaf028653@sentry.io/3060337",
+        if !opt.no_sentry {
+            Some(opt.sentry_dsn.clone())
+        } else {
+            None
+        },
         sentry::ClientOptions {
             release: sentry::release_name!(),
             ..Default::default()
@@ -36,8 +41,8 @@ async fn main() -> Result<(), MainError> {
     }

     #[cfg(all(not(debug_assertions), feature = "sentry"))]
-    if !opt.no_analytics {
-        sentry::integrations::panic::register_panic_handler();
+    if !opt.no_sentry && _sentry.is_enabled() {
+        sentry::integrations::panic::register_panic_handler(); // TODO: This shouldn't be needed when upgrading to sentry 0.19.0. These integrations are turned on by default when using `sentry::init`.
         sentry::integrations::env_logger::init(None, Default::default());
     }
 }
@@ -47,14 +52,16 @@ async fn main() -> Result<(), MainError> {
         _ => unreachable!(),
     }

-    let data = Data::new(opt.clone());
+    if let Some(path) = &opt.load_from_snapshot {
+        snapshot::load_snapshot(&opt.db_path, path, opt.ignore_snapshot_if_db_exists, opt.ignore_missing_snapshot)?;
+    }
+
+    let data = Data::new(opt.clone())?;

     if !opt.no_analytics {
         let analytics_data = data.clone();
         let analytics_opt = opt.clone();
-        thread::spawn(move|| {
-            analytics::analytics_sender(analytics_data, analytics_opt)
-        });
+        thread::spawn(move || analytics::analytics_sender(analytics_data, analytics_opt));
     }

     let data_cloned = data.clone();
@@ -62,6 +69,10 @@ async fn main() -> Result<(), MainError> {
         index_update_callback(name, &data_cloned, status);
     }));

+    if let Some(path) = &opt.snapshot_path {
+        snapshot::schedule_snapshot(data.clone(), &path, opt.snapshot_interval_sec.unwrap_or(86400))?;
+    }
+
     print_launch_resume(&opt, &data);

     let http_server = HttpServer::new(move || {
@@ -117,6 +128,25 @@ pub fn print_launch_resume(opt: &Opt, data: &Data) {
         env!("CARGO_PKG_VERSION").to_string()
     );

+    #[cfg(all(not(debug_assertions), feature = "sentry"))]
+    eprintln!(
+        "Sentry DSN:\t\t{:?}",
+        if !opt.no_sentry {
+            &opt.sentry_dsn
+        } else {
+            "Disabled"
+        }
+    );
+
+    eprintln!(
+        "Amplitude Analytics:\t{:?}",
+        if !opt.no_analytics {
+            "Enabled"
+        } else {
+            "Disabled"
+        }
+    );
+
     eprintln!();

     if data.api_keys.master.is_some() {
@@ -1,7 +1,7 @@
-use std::{error, fs};
 use std::io::{BufReader, Read};
 use std::path::PathBuf;
 use std::sync::Arc;
+use std::{error, fs};

 use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
 use rustls::{
@@ -26,7 +26,18 @@ pub struct Opt {
     #[structopt(long, env = "MEILI_MASTER_KEY")]
     pub master_key: Option<String>,

-    /// This environment variable must be set to `production` if your are running in production.
+    /// The Sentry DSN to use for error reporting. This defaults to the MeiliSearch Sentry project.
+    /// You can disable sentry all together using the `--no-sentry` flag or `MEILI_NO_SENTRY` environment variable.
+    #[cfg(all(not(debug_assertions), feature = "sentry"))]
+    #[structopt(long, env = "SENTRY_DSN", default_value = "https://5ddfa22b95f241198be2271aaf028653@sentry.io/3060337")]
+    pub sentry_dsn: String,
+
+    /// Disable Sentry error reporting.
+    #[cfg(all(not(debug_assertions), feature = "sentry"))]
+    #[structopt(long, env = "MEILI_NO_SENTRY")]
+    pub no_sentry: bool,
+
+    /// This environment variable must be set to `production` if you are running in production.
     /// If the server is running in development mode more logs will be displayed,
     /// and the master key can be avoided which implies that there is no security on the updates routes.
     /// This is useful to debug when integrating the engine with another service.
@@ -38,12 +49,12 @@ pub struct Opt {
     pub no_analytics: bool,

     /// The maximum size, in bytes, of the main lmdb database directory
-    #[structopt(long, env = "MEILI_MAIN_MAP_SIZE", default_value = "107374182400")] // 100GB
-    pub main_map_size: usize,
+    #[structopt(long, env = "MEILI_MAX_MDB_SIZE", default_value = "107374182400")] // 100GB
+    pub max_mdb_size: usize,

     /// The maximum size, in bytes, of the update lmdb database directory
-    #[structopt(long, env = "MEILI_UPDATE_MAP_SIZE", default_value = "107374182400")] // 100GB
-    pub update_map_size: usize,
+    #[structopt(long, env = "MEILI_MAX_UDB_SIZE", default_value = "107374182400")] // 100GB
+    pub max_udb_size: usize,

     /// The maximum size, in bytes, of accepted JSON payloads
     #[structopt(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "10485760")] // 10MB
@@ -82,6 +93,28 @@ pub struct Opt {
     /// SSL support tickets.
     #[structopt(long, env = "MEILI_SSL_TICKETS")]
     pub ssl_tickets: bool,

+    /// Defines the path of the snapshot file to import.
+    /// This option will, by default, stop the process if a database already exist or if no snapshot exists at
+    /// the given path. If this option is not specified no snapshot is imported.
+    #[structopt(long, env = "MEILI_LOAD_FROM_SNAPSHOT")]
+    pub load_from_snapshot: Option<PathBuf>,
+
+    /// The engine will ignore a missing snapshot and not return an error in such case.
+    #[structopt(long, requires = "load-from-snapshot", env = "MEILI_IGNORE_MISSING_SNAPSHOT")]
+    pub ignore_missing_snapshot: bool,
+
+    /// The engine will skip snapshot importation and not return an error in such case.
+    #[structopt(long, requires = "load-from-snapshot", env = "MEILI_IGNORE_SNAPSHOT_IF_DB_EXISTS")]
+    pub ignore_snapshot_if_db_exists: bool,
+
+    /// Defines the directory path where meilisearch will create snapshot each snapshot_time_gap.
+    #[structopt(long, env = "MEILI_SNAPSHOT_PATH")]
+    pub snapshot_path: Option<PathBuf>,
+
+    /// Defines time interval, in seconds, between each snapshot creation.
+    #[structopt(long, requires = "snapshot-path", env = "MEILI_SNAPSHOT_INTERVAL_SEC")]
+    pub snapshot_interval_sec: Option<u64>,
 }

 impl Opt {
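Aside: a minimal sketch of the structopt pattern the new snapshot options above rely on; `DemoOpt` is hypothetical, but the attribute shapes (`long`, `env`, `requires`) mirror the fields in the hunk, so `--snapshot-interval-sec` is only accepted together with `--snapshot-path`.

    use std::path::PathBuf;
    use structopt::StructOpt;

    #[derive(Debug, StructOpt)]
    struct DemoOpt {
        /// Mirrors `snapshot_path`: a long flag plus an environment variable.
        #[structopt(long, env = "MEILI_SNAPSHOT_PATH")]
        snapshot_path: Option<PathBuf>,

        /// Mirrors `snapshot_interval_sec`: rejected unless --snapshot-path is also given.
        #[structopt(long, requires = "snapshot-path", env = "MEILI_SNAPSHOT_INTERVAL_SEC")]
        snapshot_interval_sec: Option<u64>,
    }

    fn main() {
        let opt = DemoOpt::from_args();
        println!("{:?}", opt);
    }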
@@ -161,7 +194,7 @@ fn load_private_key(filename: PathBuf) -> Result<rustls::PrivateKey, Box<dyn error::Error>> {
 fn load_ocsp(filename: &Option<PathBuf>) -> Result<Vec<u8>, Box<dyn error::Error>> {
     let mut ret = Vec::new();

-    if let &Some(ref name) = filename {
+    if let Some(ref name) = filename {
         fs::File::open(name)
             .map_err(|_| "cannot open ocsp file")?
             .read_to_end(&mut ret)
@@ -156,7 +156,7 @@ async fn update_multiple_documents(
     let mut schema = index
         .main
         .schema(&reader)?
-        .ok_or(Error::internal("Impossible to retrieve the schema"))?;
+        .ok_or(meilisearch_core::Error::SchemaMissing)?;

     if schema.primary_key().is_none() {
         let id = match &params.primary_key {
@@ -164,7 +164,7 @@ async fn update_multiple_documents(
             None => body
                 .first()
                 .and_then(find_primary_key)
-                .ok_or(Error::bad_request("Could not infer a primary key"))?,
+                .ok_or(meilisearch_core::Error::MissingPrimaryKey)?
         };

         schema
@@ -10,7 +10,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
     cfg.service(get_health).service(change_healthyness);
 }

-#[get("/health", wrap = "Authentication::Private")]
+#[get("/health")]
 async fn get_health(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
     let reader = data.db.main_read_txn()?;
     if let Ok(Some(_)) = data.db.get_health(&reader) {
@@ -253,19 +253,10 @@ async fn update_index(

         if let Some(id) = body.primary_key.clone() {
             if let Some(mut schema) = index.main.schema(writer)? {
-                match schema.primary_key() {
-                    Some(_) => {
-                        return Err(Error::bad_request(
-                            "The primary key cannot be updated",
-                        ).into());
-                    }
-                    None => {
-                        schema.set_primary_key(&id)?;
-                        index.main.put_schema(writer, &schema)?;
-                    }
-                }
+                schema.set_primary_key(&id)?;
+                index.main.put_schema(writer, &schema)?;
             }
         }
         index.main.put_updated_at(writer)?;
         Ok(())
     })?;
@@ -24,7 +24,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
 #[derive(Serialize, Deserialize)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQuery {
-    q: String,
+    q: Option<String>,
     offset: Option<usize>,
     limit: Option<usize>,
     attributes_to_retrieve: Option<String>,
@@ -50,7 +50,7 @@ async fn search_with_url_query(
 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQueryPost {
-    q: String,
+    q: Option<String>,
     offset: Option<usize>,
     limit: Option<usize>,
     attributes_to_retrieve: Option<Vec<String>>,
@@ -177,7 +177,6 @@ impl SearchQuery {
                     None => (),
                 }
             }
-
             search_builder.attributes_to_crop(final_attributes);
         }
@@ -188,14 +187,12 @@ impl SearchQuery {
                 for attr in &restricted_attributes {
                     final_attributes.insert(attr.to_string());
                 }
-            } else {
-                if available_attributes.contains(attribute) {
+            } else if available_attributes.contains(attribute) {
                 final_attributes.insert(attribute.to_string());
             } else {
                 warn!("The attributes {:?} present in attributesToHighlight parameter doesn't exist", attribute);
             }
         }

         search_builder.attributes_to_highlight(final_attributes);
     }
@@ -246,6 +243,6 @@ fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) ->
             }
             Ok(field_ids)
         }
-        bad_val => return Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
+        bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
     }
 }
@@ -1,6 +1,7 @@
 use actix_web::{web, HttpResponse};
 use actix_web_macros::{delete, get, post};
 use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
+use meilisearch_schema::Schema;
 use std::collections::{BTreeMap, BTreeSet, HashSet};

 use crate::error::{Error, ResponseError};
@@ -24,8 +25,6 @@ pub fn services(cfg: &mut web::ServiceConfig) {
         .service(get_displayed)
         .service(update_displayed)
         .service(delete_displayed)
-        .service(get_accept_new_fields)
-        .service(update_accept_new_fields)
         .service(get_attributes_for_faceting)
         .service(delete_attributes_for_faceting)
         .service(update_attributes_for_faceting);
@@ -45,7 +44,7 @@ async fn update_all(
     let update_id = data.db.update_write::<_, _, ResponseError>(|writer| {
         let settings = body
             .into_inner()
-            .into_update()
+            .to_update()
             .map_err(Error::bad_request)?;
         let update_id = index.settings_update(writer, settings)?;
         Ok(update_id)
@@ -108,23 +107,8 @@ async fn get_all(
         _ => vec![],
     };

-    println!("{:?}", attributes_for_faceting);
-
-    let searchable_attributes = schema.clone().map(|s| {
-        s.indexed_name()
-            .iter()
-            .map(|s| s.to_string())
-            .collect::<Vec<String>>()
-    });
-
-    let displayed_attributes = schema.clone().map(|s| {
-        s.displayed_name()
-            .iter()
-            .map(|s| s.to_string())
-            .collect::<HashSet<String>>()
-    });
-
-    let accept_new_fields = schema.map(|s| s.accept_new_fields());
+    let searchable_attributes = schema.as_ref().map(get_indexed_attributes);
+    let displayed_attributes = schema.as_ref().map(get_displayed_attributes);

     let settings = Settings {
         ranking_rules: Some(Some(ranking_rules)),
@@ -133,7 +117,6 @@ async fn get_all(
         displayed_attributes: Some(displayed_attributes),
         stop_words: Some(Some(stop_words)),
         synonyms: Some(Some(synonyms)),
-        accept_new_fields: Some(accept_new_fields),
         attributes_for_faceting: Some(Some(attributes_for_faceting)),
     };
@@ -158,7 +141,6 @@ async fn delete_all(
         displayed_attributes: UpdateState::Clear,
         stop_words: UpdateState::Clear,
         synonyms: UpdateState::Clear,
-        accept_new_fields: UpdateState::Clear,
         attributes_for_faceting: UpdateState::Clear,
     };
@@ -211,7 +193,7 @@ async fn update_rules(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -253,7 +235,12 @@ async fn get_distinct(
         .open_index(&path.index_uid)
         .ok_or(Error::index_not_found(&path.index_uid))?;
     let reader = data.db.main_read_txn()?;
-    let distinct_attribute = index.main.distinct_attribute(&reader)?;
+    let distinct_attribute_id = index.main.distinct_attribute(&reader)?;
+    let schema = index.main.schema(&reader)?;
+    let distinct_attribute = match (schema, distinct_attribute_id) {
+        (Some(schema), Some(id)) => schema.name(id).map(str::to_string),
+        _ => None,
+    };

     Ok(HttpResponse::Ok().json(distinct_attribute))
 }
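Aside: a minimal sketch of the id-to-name resolution pattern in the get_distinct hunk above; the real code matches on (Option<Schema>, Option<FieldId>), while this stand-in uses a plain name table.

    fn resolve_name(names: Option<&[&str]>, id: Option<usize>) -> Option<String> {
        // only resolve when both a schema and a stored attribute id are present
        match (names, id) {
            (Some(names), Some(id)) => names.get(id).map(|s| s.to_string()),
            _ => None,
        }
    }

    fn main() {
        let names = ["title", "genre"];
        assert_eq!(resolve_name(Some(&names), Some(1)), Some("genre".to_string()));
        assert_eq!(resolve_name(None, Some(1)), None);
    }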
@@ -277,7 +264,7 @@ async fn update_distinct(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -321,7 +308,7 @@ async fn get_searchable(
     let reader = data.db.main_read_txn()?;
     let schema = index.main.schema(&reader)?;
     let searchable_attributes: Option<Vec<String>> =
-        schema.map(|s| s.indexed_name().iter().map(|i| i.to_string()).collect());
+        schema.as_ref().map(get_indexed_attributes);

     Ok(HttpResponse::Ok().json(searchable_attributes))
 }
@@ -345,7 +332,7 @@ async fn update_searchable(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;

     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
@@ -391,8 +378,7 @@ async fn get_displayed(

     let schema = index.main.schema(&reader)?;

-    let displayed_attributes: Option<HashSet<String>> =
-        schema.map(|s| s.displayed_name().iter().map(|i| i.to_string()).collect());
+    let displayed_attributes = schema.as_ref().map(get_displayed_attributes);

     Ok(HttpResponse::Ok().json(displayed_attributes))
 }
@@ -416,7 +402,7 @@ async fn update_displayed(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -445,52 +431,6 @@ async fn delete_displayed(
     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }

-#[get(
-    "/indexes/{index_uid}/settings/accept-new-fields",
-    wrap = "Authentication::Private"
-)]
-async fn get_accept_new_fields(
-    data: web::Data<Data>,
-    path: web::Path<IndexParam>,
-) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let reader = data.db.main_read_txn()?;
-
-    let schema = index.main.schema(&reader)?;
-
-    let accept_new_fields = schema.map(|s| s.accept_new_fields());
-
-    Ok(HttpResponse::Ok().json(accept_new_fields))
-}
-
-#[post(
-    "/indexes/{index_uid}/settings/accept-new-fields",
-    wrap = "Authentication::Private"
-)]
-async fn update_accept_new_fields(
-    data: web::Data<Data>,
-    path: web::Path<IndexParam>,
-    body: web::Json<Option<bool>>,
-) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = Settings {
-        accept_new_fields: Some(body.into_inner()),
-        ..Settings::default()
-    };
-
-    let settings = settings.into_update().map_err(Error::bad_request)?;
-    let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
-}
-
 #[get(
     "/indexes/{index_uid}/settings/attributes-for-faceting",
     wrap = "Authentication::Private"
@@ -544,7 +484,7 @@ async fn update_attributes_for_faceting(
         ..Settings::default()
     };

-    let settings = settings.into_update().map_err(Error::bad_request)?;
+    let settings = settings.to_update().map_err(Error::bad_request)?;
     let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
@@ -572,3 +512,25 @@ async fn delete_attributes_for_faceting(

     Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }

+fn get_indexed_attributes(schema: &Schema) -> Vec<String> {
+    if schema.is_indexed_all() {
+        ["*"].iter().map(|s| s.to_string()).collect()
+    } else {
+        schema.indexed_name()
+            .iter()
+            .map(|s| s.to_string())
+            .collect()
+    }
+}
+
+fn get_displayed_attributes(schema: &Schema) -> HashSet<String> {
+    if schema.is_displayed_all() {
+        ["*"].iter().map(|s| s.to_string()).collect()
+    } else {
+        schema.displayed_name()
+            .iter()
+            .map(|s| s.to_string())
+            .collect()
+    }
+}
@@ -5,9 +5,7 @@ use actix_web::HttpResponse;
 use actix_web_macros::get;
 use chrono::{DateTime, Utc};
 use log::error;
-use pretty_bytes::converter::convert;
 use serde::Serialize;
-use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt};
 use walkdir::WalkDir;

 use crate::error::{Error, ResponseError};
@@ -18,9 +16,7 @@ use crate::Data;
 pub fn services(cfg: &mut web::ServiceConfig) {
     cfg.service(index_stats)
         .service(get_stats)
-        .service(get_version)
-        .service(get_sys_info)
-        .service(get_sys_info_pretty);
+        .service(get_version);
 }

 #[derive(Serialize)]
@@ -136,204 +132,3 @@ async fn get_version() -> HttpResponse {
         pkg_version: env!("CARGO_PKG_VERSION").to_string(),
     })
 }
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-struct SysGlobal {
-    total_memory: u64,
-    used_memory: u64,
-    total_swap: u64,
-    used_swap: u64,
-    input_data: u64,
-    output_data: u64,
-}
-
-impl SysGlobal {
-    fn new() -> SysGlobal {
-        SysGlobal {
-            total_memory: 0,
-            used_memory: 0,
-            total_swap: 0,
-            used_swap: 0,
-            input_data: 0,
-            output_data: 0,
-        }
-    }
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-struct SysProcess {
-    memory: u64,
-    cpu: f32,
-}
-
-impl SysProcess {
-    fn new() -> SysProcess {
-        SysProcess {
-            memory: 0,
-            cpu: 0.0,
-        }
-    }
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-struct SysInfo {
-    memory_usage: f64,
-    processor_usage: Vec<f32>,
-    global: SysGlobal,
-    process: SysProcess,
-}
-
-impl SysInfo {
-    fn new() -> SysInfo {
-        SysInfo {
-            memory_usage: 0.0,
-            processor_usage: Vec::new(),
-            global: SysGlobal::new(),
-            process: SysProcess::new(),
-        }
-    }
-}
-
-#[get("/sys-info", wrap = "Authentication::Private")]
-async fn get_sys_info(data: web::Data<Data>) -> HttpResponse {
-    let mut sys = System::new();
-    let mut info = SysInfo::new();
-
-    // need to refresh twice for cpu usage
-    sys.refresh_all();
-    sys.refresh_all();
-
-    for processor in sys.get_processors() {
-        info.processor_usage.push(processor.get_cpu_usage());
-    }
-
-    info.global.total_memory = sys.get_total_memory();
-    info.global.used_memory = sys.get_used_memory();
-    info.global.total_swap = sys.get_total_swap();
-    info.global.used_swap = sys.get_used_swap();
-    info.global.input_data = sys
-        .get_networks()
-        .into_iter()
-        .map(|(_, n)| n.get_received())
-        .sum::<u64>();
-    info.global.output_data = sys
-        .get_networks()
-        .into_iter()
-        .map(|(_, n)| n.get_transmitted())
-        .sum::<u64>();
-
-    info.memory_usage = sys.get_used_memory() as f64 / sys.get_total_memory() as f64 * 100.0;
-
-    if let Some(process) = sys.get_process(data.server_pid) {
-        info.process.memory = process.memory();
-        println!("cpu usafe: {}", process.cpu_usage());
-        info.process.cpu = process.cpu_usage();
-    }
-
-    HttpResponse::Ok().json(info)
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-struct SysGlobalPretty {
-    total_memory: String,
-    used_memory: String,
-    total_swap: String,
-    used_swap: String,
-    input_data: String,
-    output_data: String,
-}
-
-impl SysGlobalPretty {
-    fn new() -> SysGlobalPretty {
-        SysGlobalPretty {
-            total_memory: "None".to_owned(),
-            used_memory: "None".to_owned(),
-            total_swap: "None".to_owned(),
-            used_swap: "None".to_owned(),
-            input_data: "None".to_owned(),
-            output_data: "None".to_owned(),
-        }
-    }
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-struct SysProcessPretty {
-    memory: String,
-    cpu: String,
-}
-
-impl SysProcessPretty {
-    fn new() -> SysProcessPretty {
-        SysProcessPretty {
-            memory: "None".to_owned(),
-            cpu: "None".to_owned(),
-        }
-    }
-}
-
-#[derive(Serialize)]
-#[serde(rename_all = "camelCase")]
-struct SysInfoPretty {
-    memory_usage: String,
-    processor_usage: Vec<String>,
-    global: SysGlobalPretty,
-    process: SysProcessPretty,
-}
-
-impl SysInfoPretty {
-    fn new() -> SysInfoPretty {
-        SysInfoPretty {
-            memory_usage: "None".to_owned(),
-            processor_usage: Vec::new(),
-            global: SysGlobalPretty::new(),
-            process: SysProcessPretty::new(),
-        }
-    }
-}
-
-#[get("/sys-info/pretty", wrap = "Authentication::Private")]
-async fn get_sys_info_pretty(data: web::Data<Data>) -> HttpResponse {
-    let mut sys = System::new();
-    let mut info = SysInfoPretty::new();
-
-    sys.refresh_all();
-    sys.refresh_all();
-    info.memory_usage = format!(
-        "{:.1} %",
-        sys.get_used_memory() as f64 / sys.get_total_memory() as f64 * 100.0
-    );
-
-    for processor in sys.get_processors() {
-        info.processor_usage
-            .push(format!("{:.1} %", processor.get_cpu_usage()));
-    }
-
-    info.global.total_memory = convert(sys.get_total_memory() as f64 * 1024.0);
-    info.global.used_memory = convert(sys.get_used_memory() as f64 * 1024.0);
-    info.global.total_swap = convert(sys.get_total_swap() as f64 * 1024.0);
-    info.global.used_swap = convert(sys.get_used_swap() as f64 * 1024.0);
-    info.global.input_data = convert(
-        sys.get_networks()
-            .into_iter()
-            .map(|(_, n)| n.get_received())
-            .sum::<u64>() as f64,
-    );
-    info.global.output_data = convert(
-        sys.get_networks()
-            .into_iter()
-            .map(|(_, n)| n.get_transmitted())
-            .sum::<u64>() as f64,
-    );
-
-    if let Some(process) = sys.get_process(data.server_pid) {
-        info.process.memory = convert(process.memory() as f64 * 1024.0);
-        info.process.cpu = format!("{:.1} %", process.cpu_usage());
-    }
-
-    HttpResponse::Ok().json(info)
-}
meilisearch-http/src/snapshot.rs (new file, 124 lines)
@@ -0,0 +1,124 @@
+use crate::Data;
+use crate::error::Error;
+
+use flate2::Compression;
+use flate2::read::GzDecoder;
+use flate2::write::GzEncoder;
+use log::error;
+use std::fs::{create_dir_all, File};
+use std::io;
+use std::path::Path;
+use std::thread;
+use std::time::{Duration};
+use tar::{Builder, Archive};
+use tempfile::TempDir;
+
+fn pack(src: &Path, dest: &Path) -> io::Result<()> {
+    let f = File::create(dest)?;
+    let gz_encoder = GzEncoder::new(f, Compression::default());
+
+    let mut tar_encoder = Builder::new(gz_encoder);
+    tar_encoder.append_dir_all(".", src)?;
+    let gz_encoder = tar_encoder.into_inner()?;
+
+    gz_encoder.finish()?;
+
+    Ok(())
+}
+
+fn unpack(src: &Path, dest: &Path) -> Result<(), Error> {
+    let f = File::open(src)?;
+    let gz = GzDecoder::new(f);
+    let mut ar = Archive::new(gz);
+
+    create_dir_all(dest)?;
+    ar.unpack(dest)?;
+
+    Ok(())
+}
+
+pub fn load_snapshot(
+    db_path: &str,
+    snapshot_path: &Path,
+    ignore_snapshot_if_db_exists: bool,
+    ignore_missing_snapshot: bool
+) -> Result<(), Error> {
+    let db_path = Path::new(db_path);
+
+    if !db_path.exists() && snapshot_path.exists() {
+        unpack(snapshot_path, db_path)
+    } else if db_path.exists() && !ignore_snapshot_if_db_exists {
+        Err(Error::Internal(format!("database already exists at {:?}", db_path)))
+    } else if !snapshot_path.exists() && !ignore_missing_snapshot {
+        Err(Error::Internal(format!("snapshot doesn't exist at {:?}", snapshot_path)))
+    } else {
+        Ok(())
+    }
+}
+
+pub fn create_snapshot(data: &Data, snapshot_path: &Path) -> Result<(), Error> {
+    let tmp_dir = TempDir::new()?;
+
+    data.db.copy_and_compact_to_path(tmp_dir.path())?;
+
+    pack(tmp_dir.path(), snapshot_path).or_else(|e| Err(Error::Internal(format!("something went wrong during snapshot compression: {}", e))))
+}
+
+pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
+    if snapshot_dir.file_name().is_none() {
+        return Err(Error::Internal("invalid snapshot file path".to_string()));
+    }
+    let db_name = Path::new(&data.db_path).file_name().ok_or_else(|| Error::Internal("invalid database name".to_string()))?;
+    create_dir_all(snapshot_dir)?;
+    let snapshot_path = snapshot_dir.join(format!("{}.tar.gz", db_name.to_str().unwrap_or("data.ms")));
+
+    thread::spawn(move || loop {
+        thread::sleep(Duration::from_secs(time_gap_s));
+        if let Err(e) = create_snapshot(&data, &snapshot_path) {
+            error!("Unsuccessful snapshot creation: {}", e);
+        }
+    });
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::io::prelude::*;
+    use std::fs;
+
+    #[test]
+    fn test_pack_unpack() {
+        let tempdir = TempDir::new().unwrap();
+
+        let test_dir = tempdir.path();
+        let src_dir = test_dir.join("src");
+        let dest_dir = test_dir.join("complex/destination/path/");
+        let archive_path = test_dir.join("archive.tar.gz");
+
+        let file_1_relative = Path::new("file1.txt");
+        let subfolder_relative = Path::new("subfolder/");
+        let file_2_relative = Path::new("subfolder/file2.txt");
+
+        create_dir_all(src_dir.join(subfolder_relative)).unwrap();
+        File::create(src_dir.join(file_1_relative)).unwrap().write_all(b"Hello_file_1").unwrap();
+        File::create(src_dir.join(file_2_relative)).unwrap().write_all(b"Hello_file_2").unwrap();
+
+        assert!(pack(&src_dir, &archive_path).is_ok());
+        assert!(archive_path.exists());
+        assert!(load_snapshot(&dest_dir.to_str().unwrap(), &archive_path, false, false).is_ok());
+
+        assert!(dest_dir.exists());
+        assert!(dest_dir.join(file_1_relative).exists());
+        assert!(dest_dir.join(subfolder_relative).exists());
+        assert!(dest_dir.join(file_2_relative).exists());
+
+        let contents = fs::read_to_string(dest_dir.join(file_1_relative)).unwrap();
+        assert_eq!(contents, "Hello_file_1");
+
+        let contents = fs::read_to_string(dest_dir.join(file_2_relative)).unwrap();
+        assert_eq!(contents, "Hello_file_2");
+    }
+}
(File diff suppressed because it is too large.)
@@ -5,7 +5,7 @@
     "balance": "$2,668.55",
     "picture": "http://placehold.it/32x32",
     "age": 36,
-    "color": "green",
+    "color": "Green",
     "name": "Lucas Hess",
     "gender": "male",
     "email": "lucashess@chorizon.com",
@@ -26,7 +26,7 @@
     "balance": "$1,706.13",
     "picture": "http://placehold.it/32x32",
     "age": 27,
-    "color": "green",
+    "color": "Green",
     "name": "Cherry Orr",
     "gender": "female",
     "email": "cherryorr@chorizon.com",
@@ -90,7 +90,7 @@
     "balance": "$2,575.78",
     "picture": "http://placehold.it/32x32",
     "age": 39,
-    "color": "green",
+    "color": "Green",
     "name": "Mariana Pacheco",
     "gender": "female",
     "email": "marianapacheco@chorizon.com",
@@ -110,7 +110,7 @@
     "balance": "$3,793.09",
     "picture": "http://placehold.it/32x32",
     "age": 20,
-    "color": "green",
+    "color": "Green",
     "name": "Warren Watson",
     "gender": "male",
     "email": "warrenwatson@chorizon.com",
@@ -155,7 +155,7 @@
     "balance": "$1,349.50",
    "picture": "http://placehold.it/32x32",
     "age": 28,
-    "color": "green",
+    "color": "Green",
     "name": "Chrystal Boyd",
     "gender": "female",
     "email": "chrystalboyd@chorizon.com",
@@ -235,7 +235,7 @@
     "balance": "$1,351.43",
     "picture": "http://placehold.it/32x32",
     "age": 28,
-    "color": "green",
+    "color": "Green",
     "name": "Evans Wagner",
     "gender": "male",
     "email": "evanswagner@chorizon.com",
@@ -431,7 +431,7 @@
     "balance": "$1,986.48",
     "picture": "http://placehold.it/32x32",
     "age": 38,
-    "color": "green",
+    "color": "Green",
     "name": "Florence Long",
     "gender": "female",
     "email": "florencelong@chorizon.com",
@@ -530,7 +530,7 @@
     "balance": "$3,973.43",
     "picture": "http://placehold.it/32x32",
     "age": 29,
-    "color": "green",
+    "color": "Green",
     "name": "Sykes Conley",
     "gender": "male",
     "email": "sykesconley@chorizon.com",
@@ -813,7 +813,7 @@
     "balance": "$1,992.38",
     "picture": "http://placehold.it/32x32",
     "age": 40,
-    "color": "green",
+    "color": "Green",
     "name": "Christina Short",
     "gender": "female",
     "email": "christinashort@chorizon.com",
@@ -944,7 +944,7 @@
     "balance": "$2,893.45",
     "picture": "http://placehold.it/32x32",
     "age": 22,
-    "color": "green",
+    "color": "Green",
     "name": "Joni Spears",
     "gender": "female",
     "email": "jonispears@chorizon.com",
@@ -988,7 +988,7 @@
     "balance": "$1,348.04",
     "picture": "http://placehold.it/32x32",
     "age": 34,
-    "color": "green",
+    "color": "Green",
     "name": "Lawson Curtis",
     "gender": "male",
     "email": "lawsoncurtis@chorizon.com",
@@ -1006,7 +1006,7 @@
     "balance": "$1,132.41",
     "picture": "http://placehold.it/32x32",
     "age": 38,
-    "color": "green",
+    "color": "Green",
     "name": "Goff May",
     "gender": "male",
     "email": "goffmay@chorizon.com",
@@ -1026,7 +1026,7 @@
     "balance": "$1,201.87",
     "picture": "http://placehold.it/32x32",
     "age": 38,
-    "color": "green",
+    "color": "Green",
     "name": "Goodman Becker",
     "gender": "male",
     "email": "goodmanbecker@chorizon.com",
@@ -1069,7 +1069,7 @@
     "balance": "$1,947.08",
     "picture": "http://placehold.it/32x32",
     "age": 21,
-    "color": "green",
+    "color": "Green",
     "name": "Guerra Mcintyre",
     "gender": "male",
     "email": "guerramcintyre@chorizon.com",
@@ -1153,7 +1153,7 @@
     "balance": "$2,113.29",
     "picture": "http://placehold.it/32x32",
     "age": 28,
-    "color": "green",
+    "color": "Green",
     "name": "Richards Walls",
     "gender": "male",
     "email": "richardswalls@chorizon.com",
@@ -1211,7 +1211,7 @@
     "balance": "$1,844.56",
     "picture": "http://placehold.it/32x32",
     "age": 20,
-    "color": "green",
+    "color": "Green",
     "name": "Kaitlin Conner",
     "gender": "female",
     "email": "kaitlinconner@chorizon.com",
@@ -1229,7 +1229,7 @@
     "balance": "$2,876.10",
     "picture": "http://placehold.it/32x32",
     "age": 38,
-    "color": "green",
+    "color": "Green",
     "name": "Mamie Fischer",
     "gender": "female",
     "email": "mamiefischer@chorizon.com",
@@ -1252,7 +1252,7 @@
     "balance": "$1,921.58",
     "picture": "http://placehold.it/32x32",
     "age": 31,
-    "color": "green",
+    "color": "Green",
     "name": "Harper Carson",
     "gender": "male",
     "email": "harpercarson@chorizon.com",
@@ -1291,7 +1291,7 @@
     "balance": "$2,813.41",
     "picture": "http://placehold.it/32x32",
     "age": 37,
-    "color": "green",
+    "color": "Green",
     "name": "Charles Castillo",
     "gender": "male",
     "email": "charlescastillo@chorizon.com",
@@ -1433,7 +1433,7 @@
     "balance": "$1,539.98",
     "picture": "http://placehold.it/32x32",
     "age": 24,
-    "color": "green",
+    "color": "Green",
     "name": "Angelina Dyer",
     "gender": "female",
     "email": "angelinadyer@chorizon.com",
@@ -1493,7 +1493,7 @@
     "balance": "$3,381.63",
     "picture": "http://placehold.it/32x32",
     "age": 38,
-    "color": "green",
+    "color": "Green",
     "name": "Candace Sawyer",
     "gender": "female",
     "email": "candacesawyer@chorizon.com",
@@ -1514,7 +1514,7 @@
     "balance": "$1,640.98",
     "picture": "http://placehold.it/32x32",
     "age": 27,
-    "color": "green",
+    "color": "Green",
     "name": "Hendricks Martinez",
     "gender": "male",
     "email": "hendricksmartinez@chorizon.com",
@@ -1557,7 +1557,7 @@
     "balance": "$1,180.90",
     "picture": "http://placehold.it/32x32",
     "age": 36,
-    "color": "green",
+    "color": "Green",
     "name": "Stark Wong",
     "gender": "male",
     "email": "starkwong@chorizon.com",
@@ -1577,7 +1577,7 @@
     "balance": "$1,913.42",
     "picture": "http://placehold.it/32x32",
     "age": 24,
-    "color": "green",
+    "color": "Green",
     "name": "Emma Jacobs",
     "gender": "female",
     "email": "emmajacobs@chorizon.com",
@@ -1595,7 +1595,7 @@
     "balance": "$1,274.29",
     "picture": "http://placehold.it/32x32",
     "age": 25,
-    "color": "green",
+    "color": "Green",
     "name": "Clarice Gardner",
     "gender": "female",
     "email": "claricegardner@chorizon.com",
@@ -1,16 +1,32 @@
 #![allow(dead_code)]

-use actix_web::{http::StatusCode, test};
 use serde_json::{json, Value};
 use std::time::Duration;

+use actix_web::{http::StatusCode, test};
+use meilisearch_core::DatabaseOptions;
+use meilisearch_http::data::Data;
+use meilisearch_http::option::Opt;
+use meilisearch_http::helpers::NormalizePath;
 use tempdir::TempDir;
 use tokio::time::delay_for;

-use meilisearch_core::DatabaseOptions;
-use meilisearch_http::data::Data;
-use meilisearch_http::helpers::NormalizePath;
-use meilisearch_http::option::Opt;
+/// Performs a search test on both post and get routes
+#[macro_export]
+macro_rules! test_post_get_search {
+    ($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
+        let post_query: meilisearch_http::routes::search::SearchQueryPost = serde_json::from_str(&$query.clone().to_string()).unwrap();
+        let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
+        let get_query = ::serde_url_params::to_string(&get_query).unwrap();
+        let ($response, $status_code) = $server.search_get(&get_query).await;
+        let _ = ::std::panic::catch_unwind(|| $block)
+            .map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
+        let ($response, $status_code) = $server.search_post($query).await;
+        let _ = ::std::panic::catch_unwind(|| $block)
+            .map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
+    };
+}

 pub struct Server {
     uid: String,
     data: Data,
@@ -28,13 +44,13 @@ impl Server {
             master_key: None,
             env: "development".to_owned(),
             no_analytics: true,
-            main_map_size: default_db_options.main_map_size,
-            update_map_size: default_db_options.update_map_size,
+            max_mdb_size: default_db_options.main_map_size,
+            max_udb_size: default_db_options.update_map_size,
             http_payload_size_limit: 10000000,
             ..Opt::default()
         };

-        let data = Data::new(opt.clone());
+        let data = Data::new(opt.clone()).unwrap();

         Server {
             uid: uid.to_string(),
@@ -96,7 +112,6 @@ impl Server {
                 "longitude",
                 "tags",
             ],
-            "acceptNewFields": false,
         });

         server.update_all_settings(body).await;
@@ -111,17 +126,19 @@ impl Server {


     pub async fn wait_update_id(&mut self, update_id: u64) {
-        loop {
+        // try 10 times to get status, or panic to not wait forever
+        for _ in 0..10 {
             let (response, status_code) = self.get_update_status(update_id).await;
             assert_eq!(status_code, 200);

-            if response["status"] == "processed" || response["status"] == "error" {
+            if response["status"] == "processed" || response["status"] == "failed" {
+                eprintln!("{:#?}", response);
                 return;
             }

             delay_for(Duration::from_secs(1)).await;
         }
+        panic!("Timeout waiting for update id");
     }

     // Global Http request GET/POST/DELETE async or sync
@@ -408,16 +425,6 @@ impl Server {
         self.delete_request_async(&url).await
     }

-    pub async fn get_accept_new_fields(&mut self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/settings/accept-new-fields", self.uid);
-        self.get_request(&url).await
-    }
-
-    pub async fn update_accept_new_fields(&mut self, body: Value) {
-        let url = format!("/indexes/{}/settings/accept-new-fields", self.uid);
-        self.post_request_async(&url, body).await;
-    }
-
     pub async fn get_synonyms(&mut self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings/synonyms", self.uid);
         self.get_request(&url).await
@@ -476,59 +483,4 @@ impl Server {
     pub async fn get_sys_info_pretty(&mut self) -> (Value, StatusCode) {
         self.get_request("/sys-info/pretty").await
     }
-
-    // Populate routes
-
-    pub async fn populate_movies(&mut self) {
-        let body = json!({
-            "uid": "movies",
-            "primaryKey": "id",
-        });
-        self.create_index(body).await;
-
-        let body = json!({
-            "rankingRules": [
-                "typo",
-                "words",
-                "proximity",
-                "attribute",
-                "wordsPosition",
-                "desc(popularity)",
-                "exactness",
-                "desc(vote_average)",
-            ],
-            "searchableAttributes": [
-                "title",
-                "tagline",
-                "overview",
-                "cast",
-                "director",
-                "producer",
-                "production_companies",
-                "genres",
-            ],
-            "displayedAttributes": [
-                "title",
-                "director",
-                "producer",
-                "tagline",
-                "genres",
-                "id",
-                "overview",
-                "vote_count",
-                "vote_average",
-                "poster_path",
-                "popularity",
-            ],
-            "acceptNewFields": false,
-        });
-
-        self.update_all_settings(body).await;
-
-        let dataset = include_bytes!("assets/movies.json");
-
-        let body: Value = serde_json::from_slice(dataset).unwrap();
-
-        self.add_or_replace_multiple_documents(body).await;
-    }
 }
|
@ -175,3 +175,43 @@ async fn check_add_documents_with_nested_sequence() {
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response["hits"], body);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
// test sample from #807
|
||||
async fn add_document_with_long_field() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server.create_index(json!({ "uid": "test" })).await;
|
||||
let body = json!([{
|
||||
"documentId":"de1c2adbb897effdfe0deae32a01035e46f932ce",
|
||||
"rank":1,
|
||||
"relurl":"/configuration/app/web.html#locations",
|
||||
"section":"Web",
|
||||
"site":"docs",
|
||||
"text":" The locations block is the most powerful, and potentially most involved, section of the .platform.app.yaml file. It allows you to control how the application container responds to incoming requests at a very fine-grained level. Common patterns also vary between language containers due to the way PHP-FPM handles incoming requests.\nEach entry of the locations block is an absolute URI path (with leading /) and its value includes the configuration directives for how the web server should handle matching requests. That is, if your domain is example.com then '/' means “requests for example.com/”, while '/admin' means “requests for example.com/admin”. If multiple blocks could match an incoming request then the most-specific will apply.\nweb:locations:'/':# Rules for all requests that don't otherwise match....'/sites/default/files':# Rules for any requests that begin with /sites/default/files....The simplest possible locations configuration is one that simply passes all requests on to your application unconditionally:\nweb:locations:'/':passthru:trueThat is, all requests to /* should be forwarded to the process started by web.commands.start above. Note that for PHP containers the passthru key must specify what PHP file the request should be forwarded to, and must also specify a docroot under which the file lives. For example:\nweb:locations:'/':root:'web'passthru:'/app.php'This block will serve requests to / from the web directory in the application, and if a file doesn’t exist on disk then the request will be forwarded to the /app.php script.\nA full list of the possible subkeys for locations is below.\n root: The folder from which to serve static assets for this location relative to the application root. The application root is the directory in which the .platform.app.yaml file is located. Typical values for this property include public or web. Setting it to '' is not recommended, and its behavior may vary depending on the type of application. Absolute paths are not supported.\n passthru: Whether to forward disallowed and missing resources from this location to the application and can be true, false or an absolute URI path (with leading /). The default value is false. For non-PHP applications it will generally be just true or false. In a PHP application this will typically be the front controller such as /index.php or /app.php. This entry works similar to mod_rewrite under Apache. Note: If the value of passthru does not begin with the same value as the location key it is under, the passthru may evaluate to another entry. That may be useful when you want different cache settings for different paths, for instance, but want missing files in all of them to map back to the same front controller. See the example block below.\n index: The files to consider when serving a request for a directory: an array of file names or null. (typically ['index.html']). Note that in order for this to work, access to the static files named must be allowed by the allow or rules keys for this location.\n expires: How long to allow static assets from this location to be cached (this enables the Cache-Control and Expires headers) and can be a time or -1 for no caching (default). Times can be suffixed with “ms” (milliseconds), “s” (seconds), “m” (minutes), “h” (hours), “d” (days), “w” (weeks), “M” (months, 30d) or “y” (years, 365d).\n scripts: Whether to allow loading scripts in that location (true or false). 
This directive is only meaningful on PHP.\n allow: Whether to allow serving files which don’t match a rule (true or false, default: true).\n headers: Any additional headers to apply to static assets. This section is a mapping of header names to header values. Responses from the application aren’t affected, to avoid overlap with the application’s own ability to include custom headers in the response.\n rules: Specific overrides for a specific location. The key is a PCRE (regular expression) that is matched against the full request path.\n request_buffering: Most application servers do not support chunked requests (e.g. fpm, uwsgi), so Platform.sh enables request_buffering by default to handle them. That default configuration would look like this if it was present in .platform.app.yaml:\nweb:locations:'/':passthru:truerequest_buffering:enabled:truemax_request_size:250mIf the application server can already efficiently handle chunked requests, the request_buffering subkey can be modified to disable it entirely (enabled: false). Additionally, applications that frequently deal with uploads greater than 250MB in size can update the max_request_size key to the application’s needs. Note that modifications to request_buffering will need to be specified at each location where it is desired.\n ",
|
||||
"title":"Locations",
|
||||
"url":"/configuration/app/web.html#locations"
|
||||
}]);
|
||||
server.add_or_replace_multiple_documents(body).await;
|
||||
let (response, _status) = server.search_post(json!({ "q": "request_buffering" })).await;
|
||||
assert!(!response["hits"].as_array().unwrap().is_empty());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn documents_with_same_id_are_overwritten() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server.create_index(json!({ "uid": "test"})).await;
|
||||
let documents = json!([
|
||||
{
|
||||
"id": 1,
|
||||
"content": "test1"
|
||||
},
|
||||
{
|
||||
"id": 1,
|
||||
"content": "test2"
|
||||
},
|
||||
]);
|
||||
server.add_or_replace_multiple_documents(documents).await;
|
||||
let (response, _status) = server.get_all_documents().await;
|
||||
assert_eq!(response.as_array().unwrap().len(), 1);
|
||||
assert_eq!(response.as_array().unwrap()[0].as_object().unwrap()["content"], "test2");
|
||||
}
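The test above pins down the replace semantics: two documents sharing a primary key collapse to the most recently pushed one. A minimal sketch of that last-write-wins merge done client-side with serde_json (`dedup_by_primary_key` is a hypothetical helper, not part of the MeiliSearch API):

```rust
use std::collections::HashMap;

use serde_json::{json, Value};

/// Keep only the last document pushed for each primary key, preserving
/// first-seen order. Hypothetical client-side helper, not an engine API.
fn dedup_by_primary_key(docs: Vec<Value>, pk: &str) -> Vec<Value> {
    let mut slot: HashMap<String, usize> = HashMap::new();
    let mut out: Vec<Value> = Vec::new();
    for doc in docs {
        let key = doc[pk].to_string();
        match slot.get(&key).copied() {
            // a later document with the same key overwrites the earlier one
            Some(i) => out[i] = doc,
            None => {
                slot.insert(key, out.len());
                out.push(doc);
            }
        }
    }
    out
}

fn main() {
    let docs = vec![
        json!({ "id": 1, "content": "test1" }),
        json!({ "id": 1, "content": "test2" }),
    ];
    let merged = dedup_by_primary_key(docs, "id");
    assert_eq!(merged.len(), 1);
    assert_eq!(merged[0]["content"], "test2");
}
```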
|
||||
|
@@ -2,30 +2,33 @@ mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn delete() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let (_response, status_code) = server.get_document(419704).await;
|
||||
let (_response, status_code) = server.get_document(50).await;
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
server.delete_document(419704).await;
|
||||
server.delete_document(50).await;
|
||||
|
||||
let (_response, status_code) = server.get_document(419704).await;
|
||||
let (_response, status_code) = server.get_document(50).await;
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
// Regression test for https://github.com/meilisearch/MeiliSearch/issues/493
|
||||
#[actix_rt::test]
|
||||
async fn delete_batch() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let (_response, status_code) = server.get_document(419704).await;
|
||||
let doc_ids = vec!(50, 55, 60);
|
||||
for doc_id in &doc_ids {
|
||||
let (_response, status_code) = server.get_document(doc_id).await;
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
let body = serde_json::json!([419704, 512200, 181812]);
|
||||
let body = serde_json::json!(&doc_ids);
|
||||
server.delete_multiple_documents(body).await;
|
||||
|
||||
let (_response, status_code) = server.get_document(419704).await;
|
||||
for doc_id in &doc_ids {
|
||||
let (_response, status_code) = server.get_document(doc_id).await;
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
}
|
||||
|
meilisearch-http/tests/errors.rs (new file, 196 lines)
@@ -0,0 +1,196 @@
|
||||
mod common;
|
||||
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
|
||||
use actix_http::http::StatusCode;
|
||||
use serde_json::{json, Map, Value};
|
||||
|
||||
macro_rules! assert_error {
|
||||
($code:literal, $type:literal, $status:path, $req:expr) => {
|
||||
let (response, status_code) = $req;
|
||||
assert_eq!(status_code, $status);
|
||||
assert_eq!(response["errorCode"].as_str().unwrap(), $code);
|
||||
assert_eq!(response["errorType"].as_str().unwrap(), $type);
|
||||
};
|
||||
}
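For reference, the fields `assert_error!` inspects come from the HTTP error envelope; the suite elsewhere checks that this envelope has exactly four fields. A hedged sketch of that shape as a serde struct (assuming serde's derive feature; the fourth field is assumed here to be an `errorLink`, and the struct is illustrative, not a type exported by meilisearch-http):

```rust
use serde::Deserialize;

/// Illustrative model of the error body checked by `assert_error!`.
/// The tests only pin `message`, `errorCode` and `errorType`;
/// `error_link` is an assumed fourth field.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ErrorEnvelope {
    message: String,
    error_code: String,
    error_type: String,
    error_link: Option<String>, // assumption, not asserted by the tests
}
```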
|
||||
|
||||
macro_rules! assert_error_async {
|
||||
($code:literal, $type:literal, $server:expr, $req:expr) => {
|
||||
let (response, _) = $req;
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
for _ in 1..10 {
|
||||
let (response, status_code) = $server.get_update_status(update_id).await;
|
||||
assert_eq!(status_code, StatusCode::OK);
|
||||
if response["status"] == "processed" || response["status"] == "failed" {
|
||||
println!("response: {}", response);
|
||||
assert_eq!(response["status"], "failed");
|
||||
assert_eq!(response["errorCode"], $code);
|
||||
assert_eq!(response["errorType"], $type);
|
||||
return
|
||||
}
|
||||
thread::sleep(Duration::from_secs(1));
|
||||
}
|
||||
};
|
||||
}
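`assert_error_async!` is a bounded polling loop: fetch the update status once a second and bail out as soon as it reaches a terminal state. The same pattern extracted into a standalone sketch (`poll_until` is hypothetical; the one-second cadence and the deadline are assumptions carried over from the macro):

```rust
use std::thread;
use std::time::{Duration, Instant};

/// Retry a check until it reports a terminal state or the deadline passes.
fn poll_until<F: FnMut() -> bool>(mut is_terminal: F, timeout: Duration) -> bool {
    let deadline = Instant::now() + timeout;
    while Instant::now() < deadline {
        if is_terminal() {
            return true;
        }
        // same cadence as the macro above
        thread::sleep(Duration::from_secs(1));
    }
    false
}

fn main() {
    let mut tries = 0;
    // a fake update that becomes terminal on the third poll
    assert!(poll_until(|| { tries += 1; tries >= 3 }, Duration::from_secs(10)));
}
```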
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn index_already_exists_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let body = json!({
|
||||
"uid": "test"
|
||||
});
|
||||
let (response, status_code) = server.create_index(body.clone()).await;
|
||||
println!("{}", response);
|
||||
assert_eq!(status_code, StatusCode::CREATED);
|
||||
assert_error!(
|
||||
"index_already_exists",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.create_index(body).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn index_not_found_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
assert_error!(
|
||||
"index_not_found",
|
||||
"invalid_request_error",
|
||||
StatusCode::NOT_FOUND,
|
||||
server.get_index().await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn primary_key_already_present_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
"primaryKey": "test"
|
||||
});
|
||||
server.create_index(body.clone()).await;
|
||||
let body = json!({
|
||||
"primaryKey": "t"
|
||||
});
|
||||
assert_error!(
|
||||
"primary_key_already_present",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.update_index(body).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn max_field_limit_exceeded_error() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
server.create_index(body).await;
|
||||
let mut doc = Map::with_capacity(70_000);
|
||||
doc.insert("id".into(), Value::String("foo".into()));
|
||||
for i in 0..69_999 {
|
||||
doc.insert(format!("field{}", i), Value::String("foo".into()));
|
||||
}
|
||||
let docs = json!([doc]);
|
||||
assert_error_async!(
|
||||
"max_fields_limit_exceeded",
|
||||
"invalid_request_error",
|
||||
server,
|
||||
server.add_or_replace_multiple_documents_sync(docs).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn missing_document_id() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
"primaryKey": "test"
|
||||
});
|
||||
server.create_index(body).await;
|
||||
let docs = json!([
|
||||
{
|
||||
"foo": "bar",
|
||||
}
|
||||
]);
|
||||
assert_error_async!(
|
||||
"missing_document_id",
|
||||
"invalid_request_error",
|
||||
server,
|
||||
server.add_or_replace_multiple_documents_sync(docs).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn facet_error() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
let search = json!({
|
||||
"q": "foo",
|
||||
"facetFilters": ["test:hello"]
|
||||
});
|
||||
assert_error!(
|
||||
"invalid_facet",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.search_post(search).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn filters_error() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
let search = json!({
|
||||
"q": "foo",
|
||||
"filters": "fo:12"
|
||||
});
|
||||
assert_error!(
|
||||
"invalid_filter",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.search_post(search).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn bad_request_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let body = json!({
|
||||
"foo": "bar",
|
||||
});
|
||||
assert_error!(
|
||||
"bad_request",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.search_post(body).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn document_not_found_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server.create_index(json!({"uid": "test"})).await;
|
||||
assert_error!(
|
||||
"document_not_found",
|
||||
"invalid_request_error",
|
||||
StatusCode::NOT_FOUND,
|
||||
server.get_document(100).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn payload_too_large_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let bigvec = vec![0u64; 10_000_000]; // 10M u64s, roughly 80 MB in memory and even larger once serialized as JSON
|
||||
assert_error!(
|
||||
"payload_too_large",
|
||||
"invalid_request_error",
|
||||
StatusCode::PAYLOAD_TOO_LARGE,
|
||||
server.create_index(json!(bigvec)).await);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn missing_primary_key_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server.create_index(json!({"uid": "test"})).await;
|
||||
let document = json!([{
|
||||
"content": "test"
|
||||
}]);
|
||||
assert_error!(
|
||||
"missing_primary_key",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.add_or_replace_multiple_documents_sync(document).await);
|
||||
}
|
@@ -1,6 +1,5 @@
|
||||
use assert_json_diff::assert_json_eq;
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
|
||||
mod common;
|
||||
|
||||
@@ -658,36 +657,8 @@ async fn check_add_documents_without_primary_key() {
|
||||
|
||||
let (response, status_code) = server.add_or_replace_multiple_documents_sync(body).await;
|
||||
|
||||
let message = response["message"].as_str().unwrap();
|
||||
assert_eq!(response.as_object().unwrap().len(), 4);
|
||||
assert_eq!(message, "Could not infer a primary key");
|
||||
assert_eq!(response["errorCode"], "missing_primary_key");
|
||||
assert_eq!(status_code, 400);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn check_first_update_should_bring_up_processed_status_after_first_docs_addition() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
let body = json!({
|
||||
"uid": "movies",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let dataset = include_bytes!("assets/movies.json");
|
||||
|
||||
let body: Value = serde_json::from_slice(dataset).unwrap();
|
||||
|
||||
// 2. Index the documents from movies.json, present inside of assets directory
|
||||
server.add_or_replace_multiple_documents(body).await;
|
||||
|
||||
// 3. Fetch the status of the indexing done above.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 4. Verify the fetch is successful and indexing status is 'processed'
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response[0]["status"], "processed");
|
||||
}
|
||||
|
meilisearch-http/tests/index_update.rs (new file, 200 lines)
@@ -0,0 +1,200 @@
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use assert_json_diff::assert_json_include;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn check_first_update_should_bring_up_processed_status_after_first_docs_addition() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let dataset = include_bytes!("assets/test_set.json");
|
||||
|
||||
let body: Value = serde_json::from_slice(dataset).unwrap();
|
||||
|
||||
// 2. Index the documents from test_set.json, located in the assets directory
|
||||
server.add_or_replace_multiple_documents(body).await;
|
||||
|
||||
// 3. Fetch the status of the indexing done above.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 4. Verify the fetch is successful and indexing status is 'processed'
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response[0]["status"], "processed");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_error_when_get_update_status_of_unexisting_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
// 1. Fetch the update status of a nonexistent index.
|
||||
let (_, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 2. Verify the fetch returned 404
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_empty_when_get_update_status_of_empty_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
// 2. Fetch the update status of the empty index.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 3. Verify the fetch is successful and that no updates are returned
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response, json!([]));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_update_status_of_pushed_documents() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
|
||||
let bodies = vec![
|
||||
json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]),
|
||||
json!([{
|
||||
"title": "Test1",
|
||||
"comment": "comment test1"
|
||||
}]),
|
||||
json!([{
|
||||
"title": "Test2",
|
||||
"comment": "comment test2"
|
||||
}]),
|
||||
];
|
||||
|
||||
let mut update_ids = Vec::new();
|
||||
|
||||
let url = "/indexes/test/documents?primaryKey=title";
|
||||
for body in bodies {
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 202);
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
update_ids.push(update_id);
|
||||
}
|
||||
|
||||
// 2. Fetch the status of index.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 3. Verify the fetch is successful, and updates are returned
|
||||
|
||||
let expected = json!([{
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_ids[0]
|
||||
},{
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_ids[1]
|
||||
},{
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_ids[2]
|
||||
},]);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_json_include!(actual: json!(response), expected: expected);
|
||||
}
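The assertion above leans on `assert_json_include!` matching `expected` as a subset of `actual`, so volatile fields such as timestamps never need to be pinned. A self-contained illustration of that subset semantics (the extra fields are invented for the example):

```rust
use assert_json_diff::assert_json_include;
use serde_json::json;

fn main() {
    // `expected` only has to be contained in `actual`; the `status` and
    // `enqueuedAt` fields (made up here) are simply ignored.
    let actual = json!({
        "status": "processed",
        "updateId": 0,
        "type": { "name": "DocumentsAddition", "number": 1 },
        "enqueuedAt": "2020-01-01T00:00:00Z"
    });
    let expected = json!({
        "updateId": 0,
        "type": { "name": "DocumentsAddition", "number": 1 }
    });
    assert_json_include!(actual: actual, expected: expected);
}
```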
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_error_if_index_does_not_exist() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let (response, status_code) = server.get_update_status(42).await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
assert_eq!(response["errorCode"], "index_not_found");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_error_if_update_does_not_exist() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let (response, status_code) = server.get_update_status(42).await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
assert_eq!(response["errorCode"], "not_found");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn should_return_existing_update() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let body = json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]);
|
||||
|
||||
let url = "/indexes/test/documents?primaryKey=title";
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
|
||||
let expected = json!({
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_id
|
||||
});
|
||||
|
||||
let (response, status_code) = server.get_update_status(update_id).await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_json_include!(actual: json!(response), expected: expected);
|
||||
}
|
meilisearch-http/tests/placeholder_search.rs (new file, 497 lines)
@@ -0,0 +1,497 @@
|
||||
use std::convert::Into;
|
||||
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use std::sync::Mutex;
|
||||
use std::cell::RefCell;
|
||||
|
||||
#[macro_use] mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_limit() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json! ({
|
||||
"limit": 3
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_offset() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json!({
|
||||
"limit": 6,
|
||||
});
|
||||
|
||||
// hack to take a value out of the macro (the closure must implement UnwindSafe)
|
||||
let expected = Mutex::new(RefCell::new(Vec::new()));
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
// take results at offset 3 as reference
|
||||
let lock = expected.lock().unwrap();
|
||||
lock.replace(response["hits"].as_array().unwrap()[3..6].iter().cloned().collect());
|
||||
});
|
||||
|
||||
let expected = expected.into_inner().unwrap().into_inner();
|
||||
|
||||
let query = json!({
|
||||
"limit": 3,
|
||||
"offset": 3,
|
||||
});
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
let response = response["hits"].as_array().unwrap();
|
||||
assert_eq!(&expected, response);
|
||||
});
|
||||
}
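The `Mutex<RefCell<_>>` dance above exists because the closure handed to `test_post_get_search!` has to be `UnwindSafe` (the macro presumably runs it where a panic may be caught), and a bare `&mut Vec<_>` capture is not. A minimal sketch of the trick in isolation:

```rust
use std::cell::RefCell;
use std::sync::Mutex;

fn main() {
    // Mutex<T> is UnwindSafe, so wrapping the buffer restores UnwindSafe
    // for the capturing closure, while RefCell still allows mutation.
    let collected = Mutex::new(RefCell::new(Vec::new()));
    let capture = || {
        let lock = collected.lock().unwrap();
        lock.replace(vec![1, 2, 3]);
    };
    capture();
    // unwrap the layers once the closure is done
    let out = collected.into_inner().unwrap().into_inner();
    assert_eq!(out, vec![1, 2, 3]);
}
```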
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_attribute_to_highlight_wildcard() {
|
||||
// there should be no highlight in placeholder search
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json!({
|
||||
"limit": 1,
|
||||
"attributesToHighlight": ["*"]
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
let result = response["hits"]
|
||||
.as_array()
|
||||
.unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap();
|
||||
for value in result.values() {
|
||||
assert!(value.to_string().find("<em>").is_none());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_matches() {
|
||||
// matches is always empty
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json!({
|
||||
"matches": true
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
let result = response["hits"]
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|v| v.as_object().unwrap()["_matchesInfo"].clone())
|
||||
.all(|m| m.as_object().unwrap().is_empty());
|
||||
assert!(result);
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_crop() {
|
||||
// a placeholder search always crops from the beginning
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json!({
|
||||
"attributesToCrop": ["about"],
|
||||
"cropLength": 20
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
|
||||
for hit in hits {
|
||||
let hit = hit.as_object().unwrap();
|
||||
let formatted = hit["_formatted"].as_object().unwrap();
|
||||
|
||||
let about = hit["about"].as_str().unwrap();
|
||||
let about_formatted = formatted["about"].as_str().unwrap();
|
||||
// the formatted "about" value should be roughly 20 characters long
|
||||
assert!(about_formatted.len() < 20 + 10);
|
||||
// the formatted part should be located at the beginning of the original one
|
||||
assert_eq!(about.find(&about_formatted).unwrap(), 0);
|
||||
}
|
||||
});
|
||||
}
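The assertions above only require the cropped text to start the original field and to stay near `cropLength`. A rough sketch of that behaviour, assuming a plain prefix crop on character boundaries (the real engine crops around query matches; in a placeholder search there is no match, so a prefix is a reasonable approximation):

```rust
/// Keep roughly `crop_length` characters from the start of the field,
/// cutting on a char boundary so the result stays valid UTF-8.
fn crop_from_start(text: &str, crop_length: usize) -> &str {
    match text.char_indices().nth(crop_length) {
        // byte offset of the char after the cut: slicing keeps crop_length chars
        Some((byte_idx, _)) => &text[..byte_idx],
        None => text,
    }
}

fn main() {
    let about = "Nostrud ea exercitation pariatur officia minim";
    let cropped = crop_from_start(about, 20);
    assert!(cropped.len() <= 20 + 10);
    assert_eq!(about.find(cropped), Some(0));
}
```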
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_attributes_to_retrieve() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json!({
|
||||
"limit": 1,
|
||||
"attributesToRetrieve": ["gender", "about"],
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hit = response["hits"]
|
||||
.as_array()
|
||||
.unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap();
|
||||
assert_eq!(hit.values().count(), 2);
|
||||
let _ = hit["gender"];
|
||||
let _ = hit["about"];
|
||||
});
|
||||
}
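`attributesToRetrieve` is a plain projection of each hit onto the requested keys, which is why the test can assert exactly two values. A minimal client-side equivalent (`retrieve` is a hypothetical helper):

```rust
use serde_json::{json, Map, Value};

/// Project a hit down to the requested attributes, dropping everything else.
fn retrieve(hit: &Map<String, Value>, attrs: &[&str]) -> Map<String, Value> {
    attrs
        .iter()
        .filter_map(|k| hit.get(*k).map(|v| (k.to_string(), v.clone())))
        .collect()
}

fn main() {
    let hit = json!({ "gender": "female", "about": "...", "age": 34 });
    let projected = retrieve(hit.as_object().unwrap(), &["gender", "about"]);
    assert_eq!(projected.len(), 2);
}
```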
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_filter() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json!({
|
||||
"filters": "color='green'"
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
assert!(hits.iter().all(|v| v["color"].as_str().unwrap() == "Green"));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
"filters": "tags=bug"
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
let value = Value::String(String::from("bug"));
|
||||
assert!(hits.iter().all(|v| v["tags"].as_array().unwrap().contains(&value)));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
"filters": "color='green' AND (tags='bug' OR tags='wontfix')"
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
let bug = Value::String(String::from("bug"));
|
||||
let wontfix = Value::String(String::from("wontfix"));
|
||||
assert!(hits.iter().all(|v|
|
||||
v["color"].as_str().unwrap() == "Green" &&
|
||||
v["tags"].as_array().unwrap().contains(&bug) ||
|
||||
v["tags"].as_array().unwrap().contains(&wontfix)));
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_test_faceted_search_valid() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
// simple tests on attributes with string value
|
||||
let body = json!({
|
||||
"attributesForFaceting": ["color"]
|
||||
});
|
||||
|
||||
server.update_all_settings(body).await;
|
||||
|
||||
let query = json!({
|
||||
"facetFilters": ["color:green"]
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value.get("color").unwrap() == "Green"));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
"facetFilters": [["color:blue"]]
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value.get("color").unwrap() == "blue"));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
"facetFilters": ["color:Blue"]
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value.get("color").unwrap() == "blue"));
|
||||
});
|
||||
|
||||
// test on arrays: ["tags:bug"]
|
||||
let body = json!({
|
||||
"attributesForFaceting": ["color", "tags"]
|
||||
});
|
||||
|
||||
server.update_all_settings(body).await;
|
||||
|
||||
let query = json!({
|
||||
"facetFilters": ["tags:bug"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
|
||||
});
|
||||
|
||||
// test and: ["color:blue", "tags:bug"]
|
||||
let query = json!({
|
||||
"facetFilters": ["color:blue", "tags:bug"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value
|
||||
.get("color")
|
||||
.unwrap() == "blue"
|
||||
&& value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
|
||||
});
|
||||
|
||||
// test or: [["color:blue", "color:green"]]
|
||||
let query = json!({
|
||||
"facetFilters": [["color:blue", "color:green"]]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value|
|
||||
value
|
||||
.get("color")
|
||||
.unwrap() == "blue"
|
||||
|| value
|
||||
.get("color")
|
||||
.unwrap() == "Green"));
|
||||
});
|
||||
// test and-or: ["tags:bug", ["color:blue", "color:green"]]
|
||||
let query = json!({
|
||||
"facetFilters": ["tags:bug", ["color:blue", "color:green"]]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(!response.get("hits").unwrap().as_array().unwrap().is_empty());
|
||||
assert!(response
|
||||
.get("hits")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value|
|
||||
value
|
||||
.get("tags")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.contains(&Value::String("bug".to_owned()))
|
||||
&& (value
|
||||
.get("color")
|
||||
.unwrap() == "blue"
|
||||
|| value
|
||||
.get("color")
|
||||
.unwrap() == "Green")));
|
||||
|
||||
});
|
||||
}
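The queries above rely on the two-level `facetFilters` grammar: the outer array is a conjunction, and any inner array is a disjunction of its entries. A sketch of an evaluator with those semantics (`matches_facet` and `matches_filters` are hypothetical; the case normalization the engine applies, e.g. `color:green` matching `"Green"`, is deliberately omitted):

```rust
use serde_json::{json, Value};

/// Does the document carry this `attr:value` facet?
fn matches_facet(doc: &Value, facet: &str) -> bool {
    let (attr, val) = facet.split_once(':').expect("facet must be attr:value");
    match &doc[attr] {
        // array-valued attributes like `tags` match if any entry matches
        Value::Array(items) => items.iter().any(|v| v == val),
        other => other == val,
    }
}

/// Outer array = AND, inner arrays = OR.
fn matches_filters(doc: &Value, filters: &[Value]) -> bool {
    filters.iter().all(|f| match f {
        Value::String(s) => matches_facet(doc, s),
        Value::Array(group) => group
            .iter()
            .any(|g| g.as_str().map_or(false, |s| matches_facet(doc, s))),
        _ => false, // deeper nesting is rejected, as the next test checks
    })
}

fn main() {
    let doc = json!({ "color": "blue", "tags": ["bug"] });
    let filters = json!(["tags:bug", ["color:blue", "color:green"]]);
    assert!(matches_filters(&doc, filters.as_array().unwrap()));
}
```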
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_test_faceted_search_invalid() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
// no faceted attributes set
|
||||
let query = json!({
|
||||
"facetFilters": ["color:blue"]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
|
||||
let body = json!({
|
||||
"attributesForFaceting": ["color", "tags"]
|
||||
});
|
||||
server.update_all_settings(body).await;
|
||||
// empty arrays are an error
|
||||
// []
|
||||
let query = json!({
|
||||
"facetFilters": []
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
// [[]]
|
||||
let query = json!({
|
||||
"facetFilters": [[]]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
// ["color:green", []]
|
||||
let query = json!({
|
||||
"facetFilters": ["color:green", []]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
|
||||
// nesting deeper than two levels is an error
|
||||
// [[[]]]
|
||||
let query = json!({
|
||||
"facetFilters": [[[]]]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
// [["color:green", ["color:blue"]]]
|
||||
let query = json!({
|
||||
"facetFilters": [["color:green", ["color:blue"]]]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
// "color:green"
|
||||
let query = json!({
|
||||
"facetFilters": "color:green"
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_test_facet_count() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
// test without facet distribution
|
||||
let query = json!({
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code|{
|
||||
assert!(response.get("exhaustiveFacetsCount").is_none());
|
||||
assert!(response.get("facetsDistribution").is_none());
|
||||
});
|
||||
|
||||
// test no facets set, search on color
|
||||
let query = json!({
|
||||
"facetsDistribution": ["color"]
|
||||
});
|
||||
test_post_get_search!(server, query.clone(), |_response, status_code|{
|
||||
assert_eq!(status_code, 400);
|
||||
});
|
||||
|
||||
let body = json!({
|
||||
"attributesForFaceting": ["color", "tags"]
|
||||
});
|
||||
server.update_all_settings(body).await;
|
||||
// same as before, but now facets are set:
|
||||
test_post_get_search!(server, query, |response, _status_code|{
|
||||
println!("{}", response);
|
||||
assert!(response.get("exhaustiveFacetsCount").is_some());
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 1);
|
||||
});
|
||||
// searching on color and tags
|
||||
let query = json!({
|
||||
"facetsDistribution": ["color", "tags"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code|{
|
||||
let facets = response.get("facetsDistribution").unwrap().as_object().unwrap();
|
||||
assert_eq!(facets.values().count(), 2);
|
||||
assert_ne!(!facets.get("color").unwrap().as_object().unwrap().values().count(), 0);
|
||||
assert_ne!(!facets.get("tags").unwrap().as_object().unwrap().values().count(), 0);
|
||||
});
|
||||
// wildcard
|
||||
let query = json!({
|
||||
"facetsDistribution": ["*"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code|{
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 2);
|
||||
});
|
||||
// wildcard with other attributes:
|
||||
let query = json!({
|
||||
"facetsDistribution": ["color", "*"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code|{
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 2);
|
||||
});
|
||||
|
||||
// empty facet list
|
||||
let query = json!({
|
||||
"facetsDistribution": []
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code|{
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 0);
|
||||
});
|
||||
|
||||
// attr not set as facet passed:
|
||||
let query = json!({
|
||||
"facetsDistribution": ["gender"]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code|{
|
||||
assert_eq!(status_code, 400);
|
||||
});
|
||||
|
||||
}
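`facetsDistribution` reports, for each faceted attribute, how many hits carry each value. A back-of-the-envelope version of that counting (hypothetical helper; wildcards and the `exhaustiveFacetsCount` flag are out of scope here):

```rust
use std::collections::HashMap;

use serde_json::{json, Value};

/// Count the values of each requested attribute across the hits.
/// Array-valued facets are ignored for brevity.
fn facets_distribution(
    hits: &[Value],
    attrs: &[&str],
) -> HashMap<String, HashMap<String, usize>> {
    let mut dist: HashMap<String, HashMap<String, usize>> = HashMap::new();
    for attr in attrs {
        let counts = dist.entry(attr.to_string()).or_default();
        for hit in hits {
            if let Some(v) = hit[*attr].as_str() {
                *counts.entry(v.to_string()).or_insert(0) += 1;
            }
        }
    }
    dist
}

fn main() {
    let hits = vec![
        json!({ "color": "blue" }),
        json!({ "color": "blue" }),
        json!({ "color": "Green" }),
    ];
    let dist = facets_distribution(&hits, &["color"]);
    assert_eq!(dist["color"]["blue"], 2);
}
```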
|
||||
|
||||
#[actix_rt::test]
|
||||
#[should_panic]
|
||||
async fn placeholder_test_bad_facet_distribution() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
// string instead of array:
|
||||
let query = json!({
|
||||
"facetsDistribution": "color"
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, _status_code| {});
|
||||
|
||||
// invalid value in array:
|
||||
let query = json!({
|
||||
"facetsDistribution": ["color", true]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, _status_code| {});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_test_sort() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let body = json!({
|
||||
"rankingRules": ["asc(age)"],
|
||||
"attributesForFaceting": ["color"]
|
||||
});
|
||||
server.update_all_settings(body).await;
|
||||
let query = json!({ });
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
hits.iter().map(|v| v["age"].as_u64().unwrap()).fold(0, |prev, cur| {
|
||||
assert!(cur >= prev);
|
||||
cur
|
||||
});
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
"facetFilters": ["color:green"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
hits.iter().map(|v| v["age"].as_u64().unwrap()).fold(0, |prev, cur| {
|
||||
assert!(cur >= prev);
|
||||
cur
|
||||
});
|
||||
});
|
||||
}
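The fold in the test above asserts that ages never decrease; checking adjacent pairs with `windows(2)` is an equivalent, arguably clearer formulation:

```rust
fn main() {
    let ages: Vec<u64> = vec![25, 27, 28, 31, 34];
    // ascending iff every adjacent pair is ordered
    assert!(ages.windows(2).all(|w| w[0] <= w[1]));
}
```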
|
(File diff suppressed because it is too large)
@@ -6,8 +6,7 @@ mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_basic() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@@ -16,93 +15,68 @@ async fn search_with_settings_basic() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"desc(popularity)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(vote_average)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"overview",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"name",
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"email",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"director",
|
||||
"producer",
|
||||
"tagline",
|
||||
"genres",
|
||||
"id",
|
||||
"overview",
|
||||
"vote_count",
|
||||
"vote_average",
|
||||
"poster_path",
|
||||
"popularity"
|
||||
"name",
|
||||
"age",
|
||||
"gender",
|
||||
"color",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"balance"
|
||||
],
|
||||
"stopWords": null,
|
||||
"synonyms": null,
|
||||
"acceptNewFields": false,
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=the%20avangers&limit=3";
|
||||
let query = "q=ea%20exercitation&limit=3";
|
||||
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 24428,
|
||||
"popularity": 44.506,
|
||||
"vote_average": 7.7,
|
||||
"title": "The Avengers",
|
||||
"tagline": "Some assembly required.",
|
||||
"overview": "When an unexpected enemy emerges and threatens global safety and security, Nick Fury, director of the international peacekeeping agency known as S.H.I.E.L.D., finds himself in need of a team to pull the world back from the brink of disaster. Spanning the globe, a daring recruitment effort begins!",
|
||||
"director": "Joss Whedon",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Science Fiction",
|
||||
"Action",
|
||||
"Adventure"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/cezWGskPY5x7GaglTTRN4Fugfb8.jpg",
|
||||
"vote_count": 21079
|
||||
"balance": "$2,467.47",
|
||||
"age": 34,
|
||||
"color": "blue",
|
||||
"name": "Patricia Goff",
|
||||
"gender": "female",
|
||||
"email": "patriciagoff@chorizon.com",
|
||||
"phone": "+1 (864) 463-2277",
|
||||
"address": "866 Hornell Loop, Cresaptown, Ohio, 1700"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"popularity": 38.659,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Science Fiction",
|
||||
"Action"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg",
|
||||
"vote_count": 10497
|
||||
"balance": "$3,344.40",
|
||||
"age": 35,
|
||||
"color": "blue",
|
||||
"name": "Adeline Flynn",
|
||||
"gender": "female",
|
||||
"email": "adelineflynn@chorizon.com",
|
||||
"phone": "+1 (994) 600-2840",
|
||||
"address": "428 Paerdegat Avenue, Hollymead, Pennsylvania, 948"
|
||||
},
|
||||
{
|
||||
"id": 299536,
|
||||
"popularity": 65.013,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Action",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg",
|
||||
"vote_count": 16056
|
||||
"balance": "$3,394.96",
|
||||
"age": 25,
|
||||
"color": "blue",
|
||||
"name": "Aida Kirby",
|
||||
"gender": "female",
|
||||
"email": "aidakirby@chorizon.com",
|
||||
"phone": "+1 (942) 532-2325",
|
||||
"address": "797 Engert Avenue, Wilsonia, Idaho, 6532"
|
||||
}
|
||||
]);
|
||||
|
||||
@@ -112,8 +86,7 @@ async fn search_with_settings_basic() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_stop_words() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@@ -122,93 +95,67 @@ async fn search_with_settings_stop_words() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"desc(popularity)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(vote_average)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"overview",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"name",
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"email",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"director",
|
||||
"producer",
|
||||
"tagline",
|
||||
"genres",
|
||||
"id",
|
||||
"overview",
|
||||
"vote_count",
|
||||
"vote_average",
|
||||
"poster_path",
|
||||
"popularity"
|
||||
"name",
|
||||
"age",
|
||||
"gender",
|
||||
"color",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"balance"
|
||||
],
|
||||
"stopWords": ["the"],
|
||||
"stopWords": ["ea"],
|
||||
"synonyms": null,
|
||||
"acceptNewFields": false,
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=the%20avangers&limit=3";
|
||||
let query = "q=ea%20exercitation&limit=3";
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 299536,
|
||||
"popularity": 65.013,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Action",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg",
|
||||
"vote_count": 16056
|
||||
"balance": "$1,921.58",
|
||||
"age": 31,
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
"phone": "+1 (912) 430-3243",
|
||||
"address": "883 Dennett Place, Knowlton, New Mexico, 9219"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"popularity": 38.659,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Science Fiction",
|
||||
"Action"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg",
|
||||
"vote_count": 10497
|
||||
"balance": "$1,706.13",
|
||||
"age": 27,
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
"phone": "+1 (995) 479-3174",
|
||||
"address": "442 Beverly Road, Ventress, New Mexico, 3361"
|
||||
},
|
||||
{
|
||||
"id": 99861,
|
||||
"popularity": 33.938,
|
||||
"vote_average": 7.3,
|
||||
"title": "Avengers: Age of Ultron",
|
||||
"tagline": "A New Age Has Come.",
|
||||
"overview": "When Tony Stark tries to jumpstart a dormant peacekeeping program, things go awry and Earth’s Mightiest Heroes are put to the ultimate test as the fate of the planet hangs in the balance. As the villainous Ultron emerges, it is up to The Avengers to stop him from enacting his terrible plans, and soon uneasy alliances and unexpected action pave the way for an epic and unique global adventure.",
|
||||
"director": "Joss Whedon",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/t90Y3G8UGQp0f0DrP60wRu9gfrH.jpg",
|
||||
"vote_count": 14661
|
||||
"balance": "$1,476.39",
|
||||
"age": 28,
|
||||
"color": "brown",
|
||||
"name": "Maureen Dale",
|
||||
"gender": "female",
|
||||
"email": "maureendale@chorizon.com",
|
||||
"phone": "+1 (984) 538-3684",
|
||||
"address": "817 Newton Street, Bannock, Wyoming, 1468"
|
||||
}
|
||||
]);
|
||||
|
||||
@@ -218,8 +165,7 @@ async fn search_with_settings_stop_words() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_synonyms() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@@ -228,98 +174,71 @@ async fn search_with_settings_synonyms() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"desc(popularity)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(vote_average)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"overview",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"name",
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"email",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"director",
|
||||
"producer",
|
||||
"tagline",
|
||||
"genres",
|
||||
"id",
|
||||
"overview",
|
||||
"vote_count",
|
||||
"vote_average",
|
||||
"poster_path",
|
||||
"popularity"
|
||||
"name",
|
||||
"age",
|
||||
"gender",
|
||||
"color",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"balance"
|
||||
],
|
||||
"stopWords": null,
|
||||
"synonyms": {
|
||||
"avangers": [
|
||||
"Captain America",
|
||||
"Iron Man"
|
||||
"application": [
|
||||
"exercitation"
|
||||
]
|
||||
},
|
||||
"acceptNewFields": false,
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=avangers&limit=3";
|
||||
let query = "q=application&limit=3";
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 299536,
|
||||
"popularity": 65.013,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Action",
|
||||
"Science Fiction"
|
||||
],
|
||||
"vote_count": 16056,
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg"
|
||||
"balance": "$1,921.58",
|
||||
"age": 31,
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
"phone": "+1 (912) 430-3243",
|
||||
"address": "883 Dennett Place, Knowlton, New Mexico, 9219"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"popularity": 38.659,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Science Fiction",
|
||||
"Action"
|
||||
],
|
||||
"vote_count": 10497,
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg"
|
||||
"balance": "$1,706.13",
|
||||
"age": 27,
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
"phone": "+1 (995) 479-3174",
|
||||
"address": "442 Beverly Road, Ventress, New Mexico, 3361"
|
||||
},
|
||||
{
|
||||
"id": 99861,
|
||||
"popularity": 33.938,
|
||||
"vote_average": 7.3,
|
||||
"title": "Avengers: Age of Ultron",
|
||||
"tagline": "A New Age Has Come.",
|
||||
"overview": "When Tony Stark tries to jumpstart a dormant peacekeeping program, things go awry and Earth’s Mightiest Heroes are put to the ultimate test as the fate of the planet hangs in the balance. As the villainous Ultron emerges, it is up to The Avengers to stop him from enacting his terrible plans, and soon uneasy alliances and unexpected action pave the way for an epic and unique global adventure.",
|
||||
"director": "Joss Whedon",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Science Fiction"
|
||||
],
|
||||
"vote_count": 14661,
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/t90Y3G8UGQp0f0DrP60wRu9gfrH.jpg"
|
||||
"balance": "$1,476.39",
|
||||
"age": 28,
|
||||
"color": "brown",
|
||||
"name": "Maureen Dale",
|
||||
"gender": "female",
|
||||
"email": "maureendale@chorizon.com",
|
||||
"phone": "+1 (984) 538-3684",
|
||||
"address": "817 Newton Street, Bannock, Wyoming, 1468"
|
||||
}
|
||||
]);
|
||||
|
||||
@@ -329,8 +248,7 @@ async fn search_with_settings_synonyms() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_ranking_rules() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@@ -339,104 +257,78 @@ async fn search_with_settings_ranking_rules() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"asc(vote_average)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(popularity)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"overview",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"name",
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"email",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"director",
|
||||
"producer",
|
||||
"tagline",
|
||||
"genres",
|
||||
"id",
|
||||
"overview",
|
||||
"vote_count",
|
||||
"vote_average",
|
||||
"poster_path",
|
||||
"popularity"
|
||||
"name",
|
||||
"age",
|
||||
"gender",
|
||||
"color",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"balance"
|
||||
],
|
||||
"stopWords": null,
|
||||
"synonyms": null,
|
||||
"acceptNewFields": false,
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=avangers&limit=3";
|
||||
let query = "q=exarcitation&limit=3";
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 99861,
|
||||
"popularity": 33.938,
|
||||
"vote_average": 7.3,
|
||||
"title": "Avengers: Age of Ultron",
|
||||
"tagline": "A New Age Has Come.",
|
||||
"overview": "When Tony Stark tries to jumpstart a dormant peacekeeping program, things go awry and Earth’s Mightiest Heroes are put to the ultimate test as the fate of the planet hangs in the balance. As the villainous Ultron emerges, it is up to The Avengers to stop him from enacting his terrible plans, and soon uneasy alliances and unexpected action pave the way for an epic and unique global adventure.",
|
||||
"director": "Joss Whedon",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/t90Y3G8UGQp0f0DrP60wRu9gfrH.jpg",
|
||||
"vote_count": 14661
|
||||
"balance": "$1,921.58",
|
||||
"age": 31,
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
"phone": "+1 (912) 430-3243",
|
||||
"address": "883 Dennett Place, Knowlton, New Mexico, 9219"
|
||||
},
|
||||
{
|
||||
"id": 299536,
|
||||
"popularity": 65.013,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Action",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg",
|
||||
"vote_count": 16056
|
||||
"balance": "$1,706.13",
|
||||
"age": 27,
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
"phone": "+1 (995) 479-3174",
|
||||
"address": "442 Beverly Road, Ventress, New Mexico, 3361"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"popularity": 38.659,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Science Fiction",
|
||||
"Action"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg",
|
||||
"vote_count": 10497
|
||||
"balance": "$1,476.39",
|
||||
"age": 28,
|
||||
"color": "brown",
|
||||
"name": "Maureen Dale",
|
||||
"gender": "female",
|
||||
"email": "maureendale@chorizon.com",
|
||||
"phone": "+1 (984) 538-3684",
|
||||
"address": "817 Newton Street, Bannock, Wyoming, 1468"
|
||||
}
|
||||
]);
|
||||
|
||||
let (response, _status_code) = server.search_get(query).await;
|
||||
println!("{}", response["hits"].clone());
|
||||
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_searchable_attributes() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@@ -445,92 +337,59 @@ async fn search_with_settings_searchable_attributes() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"desc(popularity)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(vote_average)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"tagline",
|
||||
"overview",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"director",
|
||||
"producer",
|
||||
"tagline",
|
||||
"genres",
|
||||
"id",
|
||||
"overview",
|
||||
"vote_count",
|
||||
"vote_average",
|
||||
"poster_path",
|
||||
"popularity"
|
||||
"name",
|
||||
"age",
|
||||
"gender",
|
||||
"color",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"balance"
|
||||
],
|
||||
"stopWords": null,
|
||||
"synonyms": null,
|
||||
"acceptNewFields": false,
|
||||
"synonyms": {
|
||||
"exarcitation": [
|
||||
"exercitation"
|
||||
]
|
||||
},
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=avangers&limit=3";
|
||||
let query = "q=Carol&limit=3";
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 299536,
|
||||
"popularity": 65.013,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Action",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg",
|
||||
"vote_count": 16056
|
||||
"balance": "$1,440.09",
|
||||
"age": 40,
|
||||
"color": "blue",
|
||||
"name": "Levy Whitley",
|
||||
"gender": "male",
|
||||
"email": "levywhitley@chorizon.com",
|
||||
"phone": "+1 (911) 458-2411",
|
||||
"address": "187 Thomas Street, Hachita, North Carolina, 2989"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"popularity": 38.659,
|
||||
"vote_average": 8.3,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Adventure",
|
||||
"Science Fiction",
|
||||
"Action"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg",
|
||||
"vote_count": 10497
|
||||
},
|
||||
{
|
||||
"id": 100402,
|
||||
"popularity": 16.418,
|
||||
"vote_average": 7.7,
|
||||
"title": "Captain America: The Winter Soldier",
|
||||
"tagline": "In heroes we trust.",
|
||||
"overview": "After the cataclysmic events in New York with The Avengers, Steve Rogers, aka Captain America is living quietly in Washington, D.C. and trying to adjust to the modern world. But when a S.H.I.E.L.D. colleague comes under attack, Steve becomes embroiled in a web of intrigue that threatens to put the world at risk. Joining forces with the Black Widow, Captain America struggles to expose the ever-widening conspiracy while fighting off professional assassins sent to silence him at every turn. When the full scope of the villainous plot is revealed, Captain America and the Black Widow enlist the help of a new ally, the Falcon. However, they soon find themselves up against an unexpected and formidable enemy—the Winter Soldier.",
|
||||
"director": "Anthony Russo",
|
||||
"producer": "Kevin Feige",
|
||||
"genres": [
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Science Fiction"
|
||||
],
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/5TQ6YDmymBpnF005OyoB7ohZps9.jpg",
|
||||
"vote_count": 11972
|
||||
"balance": "$1,977.66",
|
||||
"age": 36,
|
||||
"color": "brown",
|
||||
"name": "Combs Stanley",
|
||||
"gender": "male",
|
||||
"email": "combsstanley@chorizon.com",
|
||||
"phone": "+1 (827) 419-2053",
|
||||
"address": "153 Beverley Road, Siglerville, South Carolina, 3666"
|
||||
}
|
||||
]);
|
||||
|
||||
@ -540,8 +399,7 @@ async fn search_with_settings_searchable_attributes() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_displayed_attributes() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@ -550,57 +408,57 @@ async fn search_with_settings_displayed_attributes() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"desc(popularity)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(vote_average)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"overview",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"id",
|
||||
"overview",
|
||||
"poster_path"
|
||||
"name",
|
||||
"age",
|
||||
"gender",
|
||||
"color",
|
||||
"email",
|
||||
"phone"
|
||||
],
|
||||
"stopWords": null,
|
||||
"synonyms": null,
|
||||
"acceptNewFields": false,
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=avangers&limit=3";
|
||||
let query = "q=exercitation&limit=3";
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 299536,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg"
|
||||
"age": 31,
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
"phone": "+1 (912) 430-3243"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg"
|
||||
"age": 27,
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
"phone": "+1 (995) 479-3174"
|
||||
},
|
||||
{
|
||||
"id": 99861,
|
||||
"title": "Avengers: Age of Ultron",
|
||||
"tagline": "A New Age Has Come.",
|
||||
"overview": "When Tony Stark tries to jumpstart a dormant peacekeeping program, things go awry and Earth’s Mightiest Heroes are put to the ultimate test as the fate of the planet hangs in the balance. As the villainous Ultron emerges, it is up to The Avengers to stop him from enacting his terrible plans, and soon uneasy alliances and unexpected action pave the way for an epic and unique global adventure.",
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/t90Y3G8UGQp0f0DrP60wRu9gfrH.jpg"
|
||||
"age": 28,
|
||||
"color": "brown",
|
||||
"name": "Maureen Dale",
|
||||
"gender": "female",
|
||||
"email": "maureendale@chorizon.com",
|
||||
"phone": "+1 (984) 538-3684"
|
||||
}
|
||||
]);
|
||||
|
||||
@ -610,8 +468,7 @@ async fn search_with_settings_displayed_attributes() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_settings_searchable_attributes_2() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
server.populate_movies().await;
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let config = json!({
|
||||
"rankingRules": [
|
||||
@ -620,60 +477,62 @@ async fn search_with_settings_searchable_attributes_2() {
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"desc(popularity)",
|
||||
"desc(age)",
|
||||
"exactness",
|
||||
"desc(vote_average)"
|
||||
"desc(balance)"
|
||||
],
|
||||
"distinctAttribute": null,
|
||||
"searchableAttributes": [
|
||||
"tagline",
|
||||
"overview",
|
||||
"title",
|
||||
"cast",
|
||||
"director",
|
||||
"producer",
|
||||
"production_companies",
|
||||
"genres"
|
||||
"age",
|
||||
"color",
|
||||
"gender",
|
||||
"address",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"title",
|
||||
"tagline",
|
||||
"id",
|
||||
"overview",
|
||||
"poster_path"
|
||||
"name",
|
||||
"age",
|
||||
"gender"
|
||||
],
|
||||
"stopWords": null,
|
||||
"synonyms": null,
|
||||
"acceptNewFields": false,
|
||||
});
|
||||
|
||||
server.update_all_settings(config).await;
|
||||
|
||||
let query = "q=avangers&limit=3";
|
||||
let query = "q=exercitation&limit=3";
|
||||
let expect = json!([
|
||||
{
|
||||
"id": 299536,
|
||||
"title": "Avengers: Infinity War",
|
||||
"tagline": "An entire universe. Once and for all.",
|
||||
"overview": "As the Avengers and their allies have continued to protect the world from threats too large for any one hero to handle, a new danger has emerged from the cosmic shadows: Thanos. A despot of intergalactic infamy, his goal is to collect all six Infinity Stones, artifacts of unimaginable power, and use them to inflict his twisted will on all of reality. Everything the Avengers have fought for has led up to this moment - the fate of Earth and existence itself has never been more uncertain.",
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/7WsyChQLEftFiDOVTGkv3hFpyyt.jpg"
|
||||
"age": 31,
|
||||
"name": "Harper Carson",
|
||||
"gender": "male"
|
||||
},
|
||||
{
|
||||
"id": 299534,
|
||||
"title": "Avengers: Endgame",
|
||||
"tagline": "Part of the journey is the end.",
|
||||
"overview": "After the devastating events of Avengers: Infinity War, the universe is in ruins due to the efforts of the Mad Titan, Thanos. With the help of remaining allies, the Avengers must assemble once more in order to undo Thanos' actions and restore order to the universe once and for all, no matter what consequences may be in store.",
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/or06FN3Dka5tukK1e9sl16pB3iy.jpg"
|
||||
"age": 27,
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female"
|
||||
},
|
||||
{
|
||||
"id": 100402,
|
||||
"title": "Captain America: The Winter Soldier",
|
||||
"tagline": "In heroes we trust.",
|
||||
"overview": "After the cataclysmic events in New York with The Avengers, Steve Rogers, aka Captain America is living quietly in Washington, D.C. and trying to adjust to the modern world. But when a S.H.I.E.L.D. colleague comes under attack, Steve becomes embroiled in a web of intrigue that threatens to put the world at risk. Joining forces with the Black Widow, Captain America struggles to expose the ever-widening conspiracy while fighting off professional assassins sent to silence him at every turn. When the full scope of the villainous plot is revealed, Captain America and the Black Widow enlist the help of a new ally, the Falcon. However, they soon find themselves up against an unexpected and formidable enemy—the Winter Soldier.",
|
||||
"poster_path": "https://image.tmdb.org/t/p/w500/5TQ6YDmymBpnF005OyoB7ohZps9.jpg"
|
||||
"age": 28,
|
||||
"name": "Maureen Dale",
|
||||
"gender": "female"
|
||||
}
|
||||
]);
|
||||
|
||||
let (response, _status_code) = server.search_get(query).await;
|
||||
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
|
||||
}
|
||||
|
||||
// issue #798
|
||||
#[actix_rt::test]
|
||||
async fn distinct_attributes_returns_name_not_id() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
let settings = json!({
|
||||
"distinctAttribute": "color",
|
||||
});
|
||||
server.update_all_settings(settings).await;
|
||||
let (response, _) = server.get_all_settings().await;
|
||||
assert_eq!(response["distinctAttribute"], "color");
|
||||
let (response, _) = server.get_distinct_attribute().await;
|
||||
assert_eq!(response, "color");
|
||||
}
|
||||
|
@@ -1,13 +1,11 @@
use assert_json_diff::assert_json_eq;
use serde_json::json;
use std::convert::Into;

mod common;

#[actix_rt::test]
async fn write_all_and_delete() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let mut server = common::Server::test_server().await;
// 2 - Send the settings

let body = json!({
@@ -18,40 +16,40 @@ async fn write_all_and_delete() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(rank)",
"desc(registered)",
"desc(age)",
],
"distinctAttribute": "movie_id",
"distinctAttribute": "id",
"searchableAttributes": [
"id",
"movie_id",
"title",
"description",
"poster",
"release_date",
"rank",
"name",
"color",
"gender",
"email",
"phone",
"address",
"registered",
"about"
],
"displayedAttributes": [
"title",
"description",
"poster",
"release_date",
"rank",
"name",
"gender",
"email",
"registered",
"age",
],
"stopWords": [
"the",
"a",
"an",
"ad",
"in",
"ut",
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
"road": ["street", "avenue"],
"street": ["avenue"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false,
"attributesForFaceting": ["name"],
});

server.update_all_settings(body.clone()).await;

// 3 - Get all settings and compare to the previous one
@@ -78,50 +76,11 @@ async fn write_all_and_delete() {
"exactness"
],
"distinctAttribute": null,
"searchableAttributes": [
"poster_path",
"director",
"id",
"production_companies",
"producer",
"poster",
"movie_id",
"vote_count",
"cast",
"release_date",
"vote_average",
"rank",
"genres",
"overview",
"description",
"tagline",
"popularity",
"title"
],
"displayedAttributes": [
"poster_path",
"poster",
"vote_count",
"id",
"movie_id",
"title",
"rank",
"tagline",
"cast",
"producer",
"production_companies",
"description",
"director",
"genres",
"release_date",
"overview",
"vote_average",
"popularity"
],
"searchableAttributes": ["*"],
"displayedAttributes": ["*"],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": [],
"acceptNewFields": true,
});

assert_json_eq!(expect, response, ordered: false);
@@ -129,8 +88,7 @@ async fn write_all_and_delete() {

#[actix_rt::test]
async fn write_all_and_update() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let mut server = common::Server::test_server().await;

// 2 - Send the settings

@@ -142,37 +100,38 @@ async fn write_all_and_update() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(rank)",
"desc(registered)",
"desc(age)",
],
"distinctAttribute": "movie_id",
"distinctAttribute": "id",
"searchableAttributes": [
"uid",
"movie_id",
"title",
"description",
"poster",
"release_date",
"rank",
"id",
"name",
"color",
"gender",
"email",
"phone",
"address",
"registered",
"about"
],
"displayedAttributes": [
"title",
"description",
"poster",
"release_date",
"rank",
"name",
"gender",
"email",
"registered",
"age",
],
"stopWords": [
"the",
"a",
"an",
"ad",
"in",
"ut",
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
"road": ["street", "avenue"],
"street": ["avenue"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false,
"attributesForFaceting": ["name"],
});

server.update_all_settings(body.clone()).await;
@@ -193,28 +152,27 @@ async fn write_all_and_update() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(age)",
],
"distinctAttribute": null,
"searchableAttributes": [
"title",
"description",
"uid",
"name",
"color",
"age",
],
"displayedAttributes": [
"title",
"description",
"release_date",
"rank",
"poster",
"name",
"color",
"age",
"registered",
"picture",
],
"stopWords": [],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
"road": ["street", "avenue"],
"street": ["avenue"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false,
});

server.update_all_settings(body).await;
@@ -231,28 +189,27 @@ async fn write_all_and_update() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(age)",
],
"distinctAttribute": null,
"searchableAttributes": [
"title",
"description",
"uid",
"name",
"color",
"age",
],
"displayedAttributes": [
"title",
"description",
"release_date",
"rank",
"poster",
"name",
"color",
"age",
"registered",
"picture",
],
"stopWords": [],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
"road": ["street", "avenue"],
"street": ["avenue"],
},
"attributesForFaceting": ["title"],
"acceptNewFields": false
});

assert_json_eq!(expected, response, ordered: false);
@@ -260,9 +217,9 @@ async fn write_all_and_update() {

#[actix_rt::test]
async fn test_default_settings() {
let mut server = common::Server::with_uid("movies");
let mut server = common::Server::with_uid("test");
let body = json!({
"uid": "movies",
"uid": "test",
});
server.create_index(body).await;

@@ -278,12 +235,11 @@ async fn test_default_settings() {
"exactness"
],
"distinctAttribute": null,
"searchableAttributes": [],
"displayedAttributes": [],
"searchableAttributes": ["*"],
"displayedAttributes": ["*"],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": [],
"acceptNewFields": true,
});

let (response, _status_code) = server.get_all_settings().await;
@@ -293,9 +249,9 @@ async fn test_default_settings() {

#[actix_rt::test]
async fn test_default_settings_2() {
let mut server = common::Server::with_uid("movies");
let mut server = common::Server::with_uid("test");
let body = json!({
"uid": "movies",
"uid": "test",
"primaryKey": "id",
});
server.create_index(body).await;
@@ -312,16 +268,11 @@ async fn test_default_settings_2() {
"exactness"
],
"distinctAttribute": null,
"searchableAttributes": [
"id"
],
"displayedAttributes": [
"id"
],
"searchableAttributes": ["*"],
"displayedAttributes": ["*"],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": [],
"acceptNewFields": true,
});

let (response, _status_code) = server.get_all_settings().await;
@@ -332,9 +283,9 @@ async fn test_default_settings_2() {
// Test issue https://github.com/meilisearch/MeiliSearch/issues/516
#[actix_rt::test]
async fn write_setting_and_update_partial() {
let mut server = common::Server::with_uid("movies");
let mut server = common::Server::with_uid("test");
let body = json!({
"uid": "movies",
"uid": "test",
});
server.create_index(body).await;

@@ -342,20 +293,21 @@ async fn write_setting_and_update_partial() {

let body = json!({
"searchableAttributes": [
"uid",
"movie_id",
"title",
"description",
"poster",
"release_date",
"rank",
"id",
"name",
"color",
"gender",
"email",
"phone",
"address",
"about"
],
"displayedAttributes": [
"title",
"description",
"poster",
"release_date",
"rank",
"name",
"gender",
"email",
"registered",
"age",
]
});

@@ -371,20 +323,19 @@ async fn write_setting_and_update_partial() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(rank)",
"desc(age)",
"desc(registered)",
],
"distinctAttribute": "movie_id",
"distinctAttribute": "id",
"stopWords": [
"the",
"a",
"an",
"ad",
"in",
"ut",
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
"road": ["street", "avenue"],
"street": ["avenue"],
},
"acceptNewFields": false,
});

server.update_all_settings(body.clone()).await;
@@ -399,37 +350,37 @@ async fn write_setting_and_update_partial() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(rank)",
"desc(age)",
"desc(registered)",
],
"distinctAttribute": "movie_id",
"distinctAttribute": "id",
"searchableAttributes": [
"uid",
"movie_id",
"title",
"description",
"poster",
"release_date",
"rank",
"id",
"name",
"color",
"gender",
"email",
"phone",
"address",
"about"
],
"displayedAttributes": [
"title",
"description",
"poster",
"release_date",
"rank",
"name",
"gender",
"email",
"registered",
"age",
],
"stopWords": [
"the",
"a",
"an",
"ad",
"in",
"ut",
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
"road": ["street", "avenue"],
"street": ["avenue"],
},
"attributesForFaceting": [],
"acceptNewFields": false,
});

let (response, _status_code) = server.get_all_settings().await;
@@ -471,17 +422,49 @@ async fn setting_ranking_rules_dont_mess_with_other_settings() {
}

#[actix_rt::test]
async fn distinct_attribute_recorded_as_known_field() {
async fn displayed_and_searchable_attributes_reset_to_wildcard() {
let mut server = common::Server::test_server().await;
let body = json!({
"distinctAttribute": "foobar",
"acceptNewFields": true
});
server.update_all_settings(body).await;
let document = json!([{"id": 9348127, "foobar": "hello", "foo": "bar"}]);
server.add_or_update_multiple_documents(document).await;
// foobar should not be added to the searchable attributes because it is already known, but "foo" should
server.update_all_settings(json!({ "searchableAttributes": ["color"], "displayedAttributes": ["color"] })).await;
let (response, _) = server.get_all_settings().await;
assert!(response["searchableAttributes"].as_array().unwrap().iter().any(|v| v.as_str().unwrap() == "foo"));
assert!(!response["searchableAttributes"].as_array().unwrap().iter().any(|v| v.as_str().unwrap() == "foobar"));

assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "color");
assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "color");

server.delete_searchable_attributes().await;
server.delete_displayed_attributes().await;

let (response, _) = server.get_all_settings().await;

assert_eq!(response["searchableAttributes"].as_array().unwrap().len(), 1);
assert_eq!(response["displayedAttributes"].as_array().unwrap().len(), 1);
assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "*");
assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "*");

let mut server = common::Server::test_server().await;
server.update_all_settings(json!({ "searchableAttributes": ["color"], "displayedAttributes": ["color"] })).await;
let (response, _) = server.get_all_settings().await;
assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "color");
assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "color");

server.update_all_settings(json!({ "searchableAttributes": [], "displayedAttributes": [] })).await;

let (response, _) = server.get_all_settings().await;

assert_eq!(response["searchableAttributes"].as_array().unwrap().len(), 1);
assert_eq!(response["displayedAttributes"].as_array().unwrap().len(), 1);
assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "*");
assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "*");
}

#[actix_rt::test]
async fn settings_that_contains_wildcard_is_wildcard() {
let mut server = common::Server::test_server().await;
server.update_all_settings(json!({ "searchableAttributes": ["color", "*"], "displayedAttributes": ["color", "*"] })).await;

let (response, _) = server.get_all_settings().await;

assert_eq!(response["searchableAttributes"].as_array().unwrap().len(), 1);
assert_eq!(response["displayedAttributes"].as_array().unwrap().len(), 1);
assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "*");
assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "*");
}
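// Together, these two tests pin down the new wildcard semantics: deleting or
// emptying `searchableAttributes`/`displayedAttributes` resets them to ["*"],
// and any explicit list that contains "*" collapses to just ["*"].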
@@ -1,349 +0,0 @@
use assert_json_diff::assert_json_eq;
use serde_json::json;

mod common;

#[actix_rt::test]
async fn index_new_fields_default() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body).await;

// 1 - Add a document

let body = json!([{
"id": 1,
"title": "I'm a legend",
}]);

server.add_or_replace_multiple_documents(body).await;

// 2 - Get the complete document

let expected = json!({
"id": 1,
"title": "I'm a legend",
});

let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);

// 3 - Add a document with more fields

let body = json!([{
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
}]);

server.add_or_replace_multiple_documents(body).await;

// 4 - Get the complete document

let expected = json!({
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
});

let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}

#[actix_rt::test]
async fn index_new_fields_true() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body).await;

// 1 - Set indexNewFields = true

server.update_accept_new_fields(json!(true)).await;

// 2 - Add a document

let body = json!([{
"id": 1,
"title": "I'm a legend",
}]);

server.add_or_replace_multiple_documents(body).await;

// 3 - Get the complete document

let expected = json!({
"id": 1,
"title": "I'm a legend",
});

let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);

// 4 - Add a document with more fields

let body = json!([{
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
}]);

server.add_or_replace_multiple_documents(body).await;

// 5 - Get the complete document

let expected = json!({
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
});

let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}

#[actix_rt::test]
async fn index_new_fields_false() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body).await;

// 1 - Set indexNewFields = false

server.update_accept_new_fields(json!(false)).await;

// 2 - Add a document

let body = json!([{
"id": 1,
"title": "I'm a legend",
}]);

server.add_or_replace_multiple_documents(body).await;

// 3 - Get the complete document

let expected = json!({
"id": 1,
});

let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);

// 4 - Add a document with more fields

let body = json!([{
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
}]);

server.add_or_replace_multiple_documents(body).await;

// 5 - Get the complete document

let expected = json!({
"id": 2,
});

let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}

#[actix_rt::test]
async fn index_new_fields_true_then_false() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body).await;

// 1 - Set indexNewFields = true

server.update_accept_new_fields(json!(true)).await;

// 2 - Add a document

let body = json!([{
"id": 1,
"title": "I'm a legend",
}]);

server.add_or_replace_multiple_documents(body).await;

// 3 - Get the complete document

let expected = json!({
"id": 1,
"title": "I'm a legend",
});

let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);

// 4 - Set indexNewFields = false

server.update_accept_new_fields(json!(false)).await;

// 5 - Add a document with more fields

let body = json!([{
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
}]);

server.add_or_replace_multiple_documents(body).await;

// 6 - Get the complete document

let expected = json!({
"id": 2,
"title": "I'm not a legend",
});

let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}

#[actix_rt::test]
async fn index_new_fields_false_then_true() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body).await;

// 1 - Set indexNewFields = false

server.update_accept_new_fields(json!(false)).await;

// 2 - Add a document

let body = json!([{
"id": 1,
"title": "I'm a legend",
}]);

server.add_or_replace_multiple_documents(body).await;

// 3 - Get the complete document

let expected = json!({
"id": 1,
});

let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);

// 4 - Set indexNewFields = true

server.update_accept_new_fields(json!(true)).await;

// 5 - Add a document with more fields

let body = json!([{
"id": 2,
"title": "I'm not a legend",
"description": "A bad copy of the original movie I'm a legend"
}]);

server.add_or_replace_multiple_documents(body).await;

// 6 - Get the complete documents

let expected = json!({
"id": 1,
});

let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);

let expected = json!({
"id": 2,
"description": "A bad copy of the original movie I'm a legend"
});

let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}

// Fix issue https://github.com/meilisearch/MeiliSearch/issues/518
#[actix_rt::test]
async fn accept_new_fields_does_not_take_into_account_the_primary_key() {
let mut server = common::Server::with_uid("movies");

// 1 - Create an index with no primary-key

let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));

// 2 - Add searchable and displayed attributes as ["title"] & set acceptNewFields to false

let body = json!({
"searchableAttributes": ["title"],
"displayedAttributes": ["title"],
"acceptNewFields": false,
});

server.update_all_settings(body).await;

// 3 - Add a document

let body = json!([{
"id": 1,
"title": "Test",
"comment": "comment test"
}]);

server.add_or_replace_multiple_documents(body).await;

// 4 - Get the settings; they should not have changed

let (response, _status_code) = server.get_all_settings().await;

let expected = json!({
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
],
"distinctAttribute": null,
"searchableAttributes": ["title"],
"displayedAttributes": ["title"],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": [],
"acceptNewFields": false,
});

assert_json_eq!(response, expected, ordered: false);
}
@@ -5,8 +5,7 @@ mod common

#[actix_rt::test]
async fn write_all_and_delete() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let mut server = common::Server::test_server().await;

// 2 - Send the settings

@@ -17,8 +16,8 @@ async fn write_all_and_delete() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(rank)",
"desc(registered)",
"desc(age)",
]);

server.update_ranking_rules(body.clone()).await;
@@ -51,8 +50,7 @@ async fn write_all_and_delete() {

#[actix_rt::test]
async fn write_all_and_update() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let mut server = common::Server::test_server().await;

// 2 - Send the settings

@@ -63,8 +61,8 @@ async fn write_all_and_update() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(rank)",
"desc(registered)",
"desc(age)",
]);

server.update_ranking_rules(body.clone()).await;
@@ -84,7 +82,7 @@ async fn write_all_and_update() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(registered)",
]);

server.update_ranking_rules(body).await;
@@ -100,7 +98,7 @@ async fn write_all_and_update() {
"attribute",
"wordsPosition",
"exactness",
"desc(release_date)",
"desc(registered)",
]);

assert_json_eq!(expected, response, ordered: false);
@@ -108,9 +106,9 @@ async fn write_all_and_update() {

#[actix_rt::test]
async fn send_undefined_rule() {
let mut server = common::Server::with_uid("movies");
let mut server = common::Server::with_uid("test");
let body = json!({
"uid": "movies",
"uid": "test",
"primaryKey": "id",
});
server.create_index(body).await;
@@ -123,9 +121,9 @@ async fn send_undefined_rule() {

#[actix_rt::test]
async fn send_malformed_custom_rule() {
let mut server = common::Server::with_uid("movies");
let mut server = common::Server::with_uid("test");
let body = json!({
"uid": "movies",
"uid": "test",
"primaryKey": "id",
});
server.create_index(body).await;
@@ -139,16 +137,16 @@ async fn send_malformed_custom_rule() {
// Test issue https://github.com/meilisearch/MeiliSearch/issues/521
#[actix_rt::test]
async fn write_custom_ranking_and_index_documents() {
let mut server = common::Server::with_uid("movies");
let mut server = common::Server::with_uid("test");
let body = json!({
"uid": "movies",
"uid": "test",
"primaryKey": "id",
});
server.create_index(body).await;

// 1 - Add ranking rules with one custom ranking on a string

let body = json!(["asc(title)", "typo"]);
let body = json!(["asc(name)", "typo"]);

server.update_ranking_rules(body).await;

@@ -157,13 +155,13 @@ async fn write_custom_ranking_and_index_documents() {
let body = json!([
{
"id": 1,
"title": "Le Petit Prince",
"author": "Exupéry"
"name": "Cherry Orr",
"color": "green"
},
{
"id": 2,
"title": "Pride and Prejudice",
"author": "Jane Austen"
"name": "Lucas Hess",
"color": "yellow"
}
]);

@@ -173,7 +171,8 @@ async fn write_custom_ranking_and_index_documents() {

let expected = json!({
"id": 1,
"author": "Exupéry"
"name": "Cherry Orr",
"color": "green"
});

let (response, status_code) = server.get_document(1).await;
@@ -5,8 +5,7 @@ mod common

#[actix_rt::test]
async fn update_stop_words() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let mut server = common::Server::test_server().await;

// 1 - Get stop words

@@ -15,7 +14,7 @@ async fn update_stop_words() {

// 2 - Update stop words

let body = json!(["the", "a"]);
let body = json!(["ut", "ea"]);
server.update_stop_words(body.clone()).await;

// 3 - Get all stop words and compare to the previous one
@@ -35,22 +34,21 @@ async fn update_stop_words() {

#[actix_rt::test]
async fn add_documents_and_stop_words() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let mut server = common::Server::test_server().await;

// 2 - Update stop words

let body = json!(["the", "of"]);
let body = json!(["ad", "in"]);
server.update_stop_words(body.clone()).await;

// 3 - Search for a document with stop words

let (response, _status_code) = server.search_get("q=the%20mask").await;
let (response, _status_code) = server.search_get("q=in%20exercitation").await;
assert!(!response["hits"].as_array().unwrap().is_empty());

// 4 - Search for documents with *only* stop words

let (response, _status_code) = server.search_get("q=the%20of").await;
let (response, _status_code) = server.search_get("q=ad%20in").await;
assert!(response["hits"].as_array().unwrap().is_empty());

// 5 - Delete all stop words
@@ -1,13 +1,13 @@
[package]
name = "meilisearch-schema"
version = "0.11.1"
version = "0.14.1"
license = "MIT"
authors = ["Kerollmops <renault.cle@gmail.com>"]
edition = "2018"

[dependencies]
indexmap = { version = "1.3.2", features = ["serde-1"] }
meilisearch-error = { path = "../meilisearch-error", version = "0.11.1" }
meilisearch-error = { path = "../meilisearch-error", version = "0.14.1" }
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.50", features = ["preserve_order"] }
zerocopy = "0.3.0"
@@ -16,7 +16,7 @@ impl fmt::Display for Error {
use self::Error::*;
match self {
FieldNameNotFound(field) => write!(f, "The field {:?} doesn't exist", field),
PrimaryKeyAlreadyPresent => write!(f, "The schema already have an primary key. It's impossible to update it"),
PrimaryKeyAlreadyPresent => write!(f, "A primary key is already present. It's impossible to update it"),
MaxFieldsLimitExceeded => write!(f, "The maximum of possible reattributed field id has been reached"),
}
}
@@ -1,32 +1,58 @@
use crate::{FieldsMap, FieldId, SResult, Error, IndexedPos};
use serde::{Serialize, Deserialize};
use std::collections::{HashMap, HashSet};
use std::borrow::Cow;

#[derive(Clone, Debug, Serialize, Deserialize)]
enum OptionAll<T> {
All,
Some(T),
None,
}
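// A field selection can now be in one of three states: `All` (the `["*"]`
// wildcard: every field currently known to the schema), `Some(v)` (an
// explicit subset chosen by the user), and `None` (no fields selected).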

impl<T> OptionAll<T> {
// replace the value with None and return the previous value
fn take(&mut self) -> OptionAll<T> {
std::mem::replace(self, OptionAll::None)
}

fn map<U, F: FnOnce(T) -> U>(self, f: F) -> OptionAll<U> {
match self {
OptionAll::Some(x) => OptionAll::Some(f(x)),
OptionAll::All => OptionAll::All,
OptionAll::None => OptionAll::None,
}
}

pub fn is_all(&self) -> bool {
match self {
OptionAll::All => true,
_ => false,
}
}
}

impl<T> Default for OptionAll<T> {
fn default() -> OptionAll<T> {
OptionAll::All
}
}
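// Defaulting to `All` means a freshly created schema displays and indexes
// every field, which is what the settings tests above now report as `["*"]`.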

#[derive(Clone, Debug, Serialize, Deserialize, Default)]
pub struct Schema {
fields_map: FieldsMap,

primary_key: Option<FieldId>,
ranked: HashSet<FieldId>,
displayed: HashSet<FieldId>,
displayed: OptionAll<HashSet<FieldId>>,

indexed: Vec<FieldId>,
indexed: OptionAll<Vec<FieldId>>,
indexed_map: HashMap<FieldId, IndexedPos>,

accept_new_fields: bool,
}

impl Schema {
pub fn new() -> Schema {
Schema {
fields_map: FieldsMap::default(),
primary_key: None,
ranked: HashSet::new(),
displayed: HashSet::new(),
indexed: Vec::new(),
indexed_map: HashMap::new(),
accept_new_fields: true,
}
Schema::default()
}

pub fn with_primary_key(name: &str) -> Schema {
@@ -34,21 +60,18 @@ impl Schema {
let field_id = fields_map.insert(name).unwrap();

let mut displayed = HashSet::new();
let mut indexed = Vec::new();
let mut indexed_map = HashMap::new();

displayed.insert(field_id);
indexed.push(field_id);
indexed_map.insert(field_id, 0.into());

Schema {
fields_map,
primary_key: Some(field_id),
ranked: HashSet::new(),
displayed,
indexed,
displayed: OptionAll::All,
indexed: OptionAll::All,
indexed_map,
accept_new_fields: true,
}
}

@@ -63,10 +86,8 @@ impl Schema {

let id = self.insert(name)?;
self.primary_key = Some(id);
if self.accept_new_fields {
self.set_indexed(name)?;
self.set_displayed(name)?;
}

Ok(id)
}
@@ -97,12 +118,8 @@ impl Schema {
Ok(id)
}
None => {
if self.accept_new_fields {
self.set_indexed(name)?;
self.set_displayed(name)
} else {
self.fields_map.insert(name)
}
}
}
}
@@ -115,20 +132,50 @@ impl Schema {
self.ranked.iter().filter_map(|a| self.name(*a)).collect()
}

pub fn displayed(&self) -> &HashSet<FieldId> {
&self.displayed
pub fn displayed(&self) -> Cow<HashSet<FieldId>> {
match self.displayed {
OptionAll::Some(ref v) => Cow::Borrowed(v),
OptionAll::All => {
let fields = self
.fields_map
.iter()
.map(|(_, &v)| v)
.collect::<HashSet<_>>();
Cow::Owned(fields)
}
OptionAll::None => Cow::Owned(HashSet::new())
}
}

pub fn is_displayed_all(&self) -> bool {
self.displayed.is_all()
}

pub fn displayed_name(&self) -> HashSet<&str> {
self.displayed.iter().filter_map(|a| self.name(*a)).collect()
match self.displayed {
OptionAll::All => self.fields_map.iter().filter_map(|(_, &v)| self.name(v)).collect(),
OptionAll::Some(ref v) => v.iter().filter_map(|a| self.name(*a)).collect(),
OptionAll::None => HashSet::new(),
}
}

pub fn indexed(&self) -> &Vec<FieldId> {
&self.indexed
pub fn indexed(&self) -> Cow<[FieldId]> {
match self.indexed {
OptionAll::Some(ref v) => Cow::Borrowed(v),
OptionAll::All => {
let fields = self
.fields_map
.iter()
.map(|(_, &f)| f)
.collect();
Cow::Owned(fields)
},
OptionAll::None => Cow::Owned(Vec::new())
}
}

pub fn indexed_name(&self) -> Vec<&str> {
self.indexed.iter().filter_map(|a| self.name(*a)).collect()
self.indexed().iter().filter_map(|a| self.name(*a)).collect()
}

pub fn set_ranked(&mut self, name: &str) -> SResult<FieldId> {
@@ -139,18 +186,33 @@ impl Schema {

pub fn set_displayed(&mut self, name: &str) -> SResult<FieldId> {
let id = self.fields_map.insert(name)?;
self.displayed.insert(id);
self.displayed = match self.displayed.take() {
OptionAll::All => OptionAll::All,
OptionAll::None => {
let mut displayed = HashSet::new();
displayed.insert(id);
OptionAll::Some(displayed)
},
OptionAll::Some(mut v) => {
v.insert(id);
OptionAll::Some(v)
}
};
Ok(id)
}
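
// In the new version below, the indexed position comes from `indexed_map`,
// which is maintained for every indexed field, so it stays correct even when
// `indexed` is `OptionAll::All`; `OptionAll::map` only pushes onto an
// explicit `Some` list.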
pub fn set_indexed(&mut self, name: &str) -> SResult<(FieldId, IndexedPos)> {
let id = self.fields_map.insert(name)?;

if let Some(indexed_pos) = self.indexed_map.get(&id) {
return Ok((id, *indexed_pos))
};
let pos = self.indexed.len() as u16;
self.indexed.push(id);
let pos = self.indexed_map.len() as u16;
self.indexed_map.insert(id, pos.into());
self.indexed = self.indexed.take().map(|mut v| {
v.push(id);
v
});
Ok((id, pos.into()))
}

@@ -164,16 +226,47 @@ impl Schema {
}
}

/// Removes a field from the displayed attributes. If displayed attributes is OptionAll::All,
/// displayed attributes is turned into OptionAll::Some(v), where v is all displayed attributes
/// except name.
pub fn remove_displayed(&mut self, name: &str) {
if let Some(id) = self.fields_map.id(name) {
self.displayed.remove(&id);
self.displayed = match self.displayed.take() {
OptionAll::Some(mut v) => {
v.remove(&id);
OptionAll::Some(v)
}
OptionAll::All => {
let displayed = self.fields_map
.iter()
.filter_map(|(key, &value)| {
if key != name {
Some(value)
} else {
None
}
})
.collect::<HashSet<_>>();
OptionAll::Some(displayed)
}
OptionAll::None => OptionAll::None,
};
}
}

pub fn remove_indexed(&mut self, name: &str) {
if let Some(id) = self.fields_map.id(name) {
self.indexed_map.remove(&id);
self.indexed.retain(|x| *x != id);
self.indexed = match self.indexed.take() {
// valid because indexed is All and indexed() returns the content of
// indexed_map that is already updated
OptionAll::All => OptionAll::Some(self.indexed().into_owned()),
OptionAll::Some(mut v) => {
v.retain(|x| *x != id);
OptionAll::Some(v)
}
OptionAll::None => OptionAll::None,
}
}
}

@@ -182,20 +275,28 @@ impl Schema {
}

pub fn is_displayed(&self, id: FieldId) -> bool {
self.displayed.get(&id).is_some()
match self.displayed {
OptionAll::Some(ref v) => v.contains(&id),
OptionAll::All => true,
OptionAll::None => false,
}
}

pub fn is_indexed(&self, id: FieldId) -> Option<&IndexedPos> {
self.indexed_map.get(&id)
}

pub fn indexed_pos_to_field_id<I: Into<IndexedPos>>(&self, pos: I) -> Option<FieldId> {
let indexed_pos = pos.into().0 as usize;
if indexed_pos < self.indexed.len() {
Some(self.indexed[indexed_pos as usize])
} else {
None
pub fn is_indexed_all(&self) -> bool {
self.indexed.is_all()
}

pub fn indexed_pos_to_field_id<I: Into<IndexedPos>>(&self, pos: I) -> Option<FieldId> {
let indexed_pos = pos.into().0;
self
.indexed_map
.iter()
.find(|(_, &v)| v.0 == indexed_pos)
.map(|(&k, _)| k)
}

pub fn update_ranked<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) -> SResult<()> {
@@ -207,7 +308,13 @@ impl Schema {
}

pub fn update_displayed<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) -> SResult<()> {
self.displayed.clear();
self.displayed = match self.displayed.take() {
OptionAll::Some(mut v) => {
v.clear();
OptionAll::Some(v)
}
_ => OptionAll::Some(HashSet::new())
};
for name in data {
self.set_displayed(name.as_ref())?;
}
@@ -215,7 +322,13 @@ impl Schema {
}

pub fn update_indexed<S: AsRef<str>>(&mut self, data: Vec<S>) -> SResult<()> {
self.indexed.clear();
self.indexed = match self.indexed.take() {
OptionAll::Some(mut v) => {
v.clear();
OptionAll::Some(v)
},
_ => OptionAll::Some(Vec::new()),
};
self.indexed_map.clear();
for name in data {
self.set_indexed(name.as_ref())?;
@@ -224,29 +337,16 @@ impl Schema {
}

pub fn set_all_fields_as_indexed(&mut self) {
self.indexed.clear();
self.indexed = OptionAll::All;
self.indexed_map.clear();

for (_name, id) in self.fields_map.iter() {
let pos = self.indexed.len() as u16;
self.indexed.push(*id);
let pos = self.indexed_map.len() as u16;
self.indexed_map.insert(*id, pos.into());
}
}

pub fn set_all_fields_as_displayed(&mut self) {
self.displayed.clear();

for (_name, id) in self.fields_map.iter() {
self.displayed.insert(*id);
}
}

pub fn accept_new_fields(&self) -> bool {
self.accept_new_fields
}

pub fn set_accept_new_fields(&mut self, value: bool) {
self.accept_new_fields = value;
self.displayed = OptionAll::All
}
}
@@ -1,6 +1,6 @@
[package]
name = "meilisearch-tokenizer"
version = "0.11.1"
version = "0.14.1"
license = "MIT"
authors = ["Kerollmops <renault.cle@gmail.com>"]
edition = "2018"
@@ -101,11 +101,14 @@ pub fn split_query_string(query: &str) -> impl Iterator<Item = &str> {
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Token<'a> {
pub word: &'a str,
/// index of the token in the token sequence
pub index: usize,
pub word_index: usize,
pub char_index: usize,
}
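// `index` is the new sequential token number; `word_index` is the word
// position used for ranking (the tests below show it advancing by more than
// one across hard separators), and `char_index` is the character offset of
// the token in the original text.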
|
||||
|
||||
pub struct Tokenizer<'a> {
|
||||
count: usize,
|
||||
inner: &'a str,
|
||||
word_index: usize,
|
||||
char_index: usize,
|
||||
@ -121,6 +124,7 @@ impl<'a> Tokenizer<'a> {
|
||||
.fold((0, 0), chars_count_index);
|
||||
|
||||
Tokenizer {
|
||||
count: 0,
|
||||
inner: &string[index..],
|
||||
word_index: 0,
|
||||
char_index: count,
|
||||
@ -150,6 +154,7 @@ impl<'a> Iterator for Tokenizer<'a> {
|
||||
|
||||
let token = Token {
|
||||
word: string,
|
||||
index: self.count,
|
||||
word_index: self.word_index,
|
||||
char_index: self.char_index,
|
||||
};
|
||||
@ -158,6 +163,7 @@ impl<'a> Iterator for Tokenizer<'a> {
|
||||
self.word_index += 1;
|
||||
}
|
||||
|
||||
self.count += 1;
|
||||
self.char_index += count;
|
||||
self.inner = &self.inner[index..];
|
||||
|
||||
@ -175,6 +181,7 @@ where
|
||||
{
|
||||
inner: I,
|
||||
current: Option<Peekable<Tokenizer<'a>>>,
|
||||
count: usize,
|
||||
word_offset: usize,
|
||||
char_offset: usize,
|
||||
}
|
||||
@ -188,6 +195,7 @@ where
|
||||
SeqTokenizer {
|
||||
inner: iter,
|
||||
current,
|
||||
count: 0,
|
||||
word_offset: 0,
|
||||
char_offset: 0,
|
||||
}
|
||||
@ -209,6 +217,7 @@ where
|
||||
// to the token before returning it
|
||||
let token = Token {
|
||||
word: token.word,
|
||||
index: self.count,
|
||||
word_index: token.word_index + self.word_offset,
|
||||
char_index: token.char_index + self.char_offset,
|
||||
};
|
||||
@@ -249,6 +258,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "salut",
+                index: 0,
                 word_index: 0,
                 char_index: 0
             })
@@ -261,6 +271,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "yo",
+                index: 0,
                 word_index: 0,
                 char_index: 0
             })
@@ -276,6 +287,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "yo",
+                index: 0,
                 word_index: 0,
                 char_index: 4
             })
@@ -284,6 +296,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lolo",
+                index: 1,
                 word_index: 1,
                 char_index: 7
             })
@@ -292,6 +305,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "aïe",
+                index: 2,
                 word_index: 9,
                 char_index: 13
             })
@@ -300,6 +314,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "ouch",
+                index: 3,
                 word_index: 17,
                 char_index: 18
             })
@@ -312,6 +327,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "yo",
+                index: 0,
                 word_index: 0,
                 char_index: 0
             })
@@ -320,6 +336,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lolo",
+                index: 1,
                 word_index: 8,
                 char_index: 5
             })
@@ -328,6 +345,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "wtf",
+                index: 2,
                 word_index: 16,
                 char_index: 12
             })
@@ -336,6 +354,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lol",
+                index: 3,
                 word_index: 17,
                 char_index: 18
             })
@@ -344,6 +363,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "aïe",
+                index: 4,
                 word_index: 25,
                 char_index: 24
             })
@@ -359,6 +379,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "yo",
+                index: 0,
                 word_index: 0,
                 char_index: 4
             })
@@ -367,6 +388,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "😂",
+                index: 1,
                 word_index: 1,
                 char_index: 7
             })
@@ -375,6 +397,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "aïe",
+                index: 2,
                 word_index: 9,
                 char_index: 10
             })
@@ -387,6 +410,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "yo",
+                index: 0,
                 word_index: 0,
                 char_index: 0
             })
@@ -395,6 +419,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lolo",
+                index: 1,
                 word_index: 8,
                 char_index: 5
             })
@@ -403,6 +428,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "😱",
+                index: 2,
                 word_index: 16,
                 char_index: 12
             })
@@ -411,6 +437,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lol",
+                index: 3,
                 word_index: 17,
                 char_index: 16
             })
@@ -419,6 +446,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "😣",
+                index: 4,
                 word_index: 25,
                 char_index: 22
             })
@@ -434,6 +462,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "\u{2ec4}",
+                index: 0,
                 word_index: 0,
                 char_index: 0
             })
@@ -442,6 +471,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lolilol",
+                index: 1,
                 word_index: 1,
                 char_index: 1
             })
@@ -450,6 +480,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "\u{2ec7}",
+                index: 2,
                 word_index: 2,
                 char_index: 8
             })
@@ -462,6 +493,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "\u{2ec4}",
+                index: 0,
                 word_index: 0,
                 char_index: 0
             })
@@ -470,6 +502,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "\u{2ed3}",
+                index: 1,
                 word_index: 1,
                 char_index: 1
             })
@@ -478,6 +511,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "\u{2ef2}",
+                index: 2,
                 word_index: 2,
                 char_index: 2
             })
@@ -486,6 +520,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "lolilol",
+                index: 3,
                 word_index: 3,
                 char_index: 4
             })
@@ -494,6 +529,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "hello",
+                index: 4,
                 word_index: 4,
                 char_index: 14
             })
@@ -502,6 +538,7 @@ mod tests {
             tokenizer.next(),
             Some(Token {
                 word: "\u{2ec7}",
+                index: 5,
                 word_index: 5,
                 char_index: 23
             })
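The expectations are easier to scan side by side. A self-contained summary of one hard-separator case from the tests above: `index` advances by exactly one per token, hard separators make `word_index` jump ahead, and `char_index` appears to count characters rather than bytes (the emoji cases above advance it by one per emoji). `Token` is re-declared locally so the snippet compiles on its own:

```rust
// Self-contained summary of one expectation set from the tests above; the
// values are copied verbatim from the asserted tokens.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
struct Token<'a> {
    word: &'a str,
    index: usize,
    word_index: usize,
    char_index: usize,
}

fn main() {
    let expected = [
        Token { word: "yo",   index: 0, word_index: 0,  char_index: 0 },
        Token { word: "lolo", index: 1, word_index: 8,  char_index: 5 },
        Token { word: "wtf",  index: 2, word_index: 16, char_index: 12 },
        Token { word: "lol",  index: 3, word_index: 17, char_index: 18 },
        Token { word: "aïe",  index: 4, word_index: 25, char_index: 24 },
    ];
    for t in &expected {
        println!("{:>2} {:>2} {:>2} {}", t.index, t.word_index, t.char_index, t.word);
    }
}
```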
@@ -1,6 +1,6 @@
 [package]
 name = "meilisearch-types"
-version = "0.11.1"
+version = "0.14.1"
 license = "MIT"
 authors = ["Clément Renault <renault.cle@gmail.com>"]
 edition = "2018"