Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-07-21 05:41:01 +00:00)

Compare commits: release-v0 ... v0.17.0rc0 (176 commits)
Author | SHA1 | Date | |
---|---|---|---|
1944dd70c7 | |||
3ec76ac33d | |||
72bc22dfd1 | |||
b8e677efd2 | |||
65079f5e2e | |||
cfb21b94e8 | |||
f564a9ce51 | |||
85d0a914ac | |||
d3e7e18b7d | |||
d6c76b02e3 | |||
fe3e20751c | |||
aab041e692 | |||
75e22fc7f5 | |||
6fff49b33b | |||
2eaab48532 | |||
43df4a56c4 | |||
680756500c | |||
0645a6568e | |||
3a0861694d | |||
0f4182bddf | |||
cc4284b89e | |||
a326466f32 | |||
5a67862e00 | |||
201bb3f80a | |||
49afe7d89f | |||
f968d039f7 | |||
705669ddf8 | |||
73dd345cda | |||
65c6e46775 | |||
7a1d003341 | |||
6a2a56d48f | |||
9ff5bdd297 | |||
4ba5e22f64 | |||
a8ab15d65d | |||
93953103ad | |||
f25890c140 | |||
39cf1931ae | |||
bbb6771625 | |||
e9f9f270e1 | |||
190b78b7be | |||
257f9fb2b2 | |||
d35a104ad3 | |||
9bae7a35bf | |||
33c7c5a7e3 | |||
91363daeaa | |||
f9ab85adbe | |||
9dbf43d3e7 | |||
772f4d6671 | |||
1b57218739 | |||
8767269b47 | |||
baceaed582 | |||
62a28bc2a1 | |||
f83caa6c40 | |||
53b1483e71 | |||
a0eafea200 | |||
10dace305d | |||
1eace79f77 | |||
e6033e174d | |||
f1925b8f71 | |||
834f3cc192 | |||
e049aead16 | |||
0a9c9670e7 | |||
1744dcebfe | |||
29712916e6 | |||
4d2783bb04 | |||
50f0fbb05c | |||
5a842ec94a | |||
372680e2ab | |||
6465a3f549 | |||
690eab4a25 | |||
dc2e5ceed2 | |||
1639a7338d | |||
ac7226bb27 | |||
086020e543 | |||
452d456fad | |||
f741942226 | |||
a27399cf65 | |||
29b8810db8 | |||
a5a47911d1 | |||
7bf6a3d7b2 | |||
0cabcb7c79 | |||
f359b64d59 | |||
2f3ecab8d9 | |||
17f71a1a55 | |||
bfe3bb0eeb | |||
0a67248bfe | |||
2644f087d0 | |||
91c8c7a2e3 | |||
029abd3413 | |||
726756bad4 | |||
10c56d9919 | |||
5f59f93804 | |||
704defea78 | |||
eb240c8b60 | |||
c3bcd7a410 | |||
26124e6436 | |||
3cd6f5c7ea | |||
7c646e031c | |||
0a2ca075d3 | |||
b406b6ee44 | |||
726e867058 | |||
f4d918d22a | |||
5ef3a01b6c | |||
5a98f1f076 | |||
4398f2c023 | |||
afc3b0915b | |||
f313de98c8 | |||
03d4651077 | |||
32f6a9a457 | |||
099a0802fc | |||
e258e0b2c2 | |||
c254320860 | |||
51fd849852 | |||
ab170ce4fd | |||
90226dc8a9 | |||
63868b2600 | |||
22d439f682 | |||
394f2abd49 | |||
030bcd8b05 | |||
d8d29d3615 | |||
efe5984d54 | |||
63260e6443 | |||
a794970b72 | |||
ba0f44e361 | |||
4acaecd921 | |||
84a3e95fa4 | |||
f045e111ea | |||
87a76c2a60 | |||
4edaebab90 | |||
b43137b508 | |||
0ca44b6a82 | |||
ae2de4d0c4 | |||
e47b4acd08 | |||
a07c3743f0 | |||
954f572e79 | |||
733c02dd7c | |||
c94daf8c3d | |||
6db51ed8b2 | |||
118c673eaf | |||
a9a2d3bca3 | |||
4a9e56aa4f | |||
14bb9505eb | |||
d937aeac0a | |||
dd540d2540 | |||
4ecaf99047 | |||
445a6c9ea2 | |||
67b7d60cb0 | |||
94b3e8e56e | |||
89b5ae63fc | |||
2a79dc9ded | |||
5ed62dbf76 | |||
cb267b68ed | |||
6539be6c46 | |||
a23bdb31a3 | |||
9014290875 | |||
1903302a74 | |||
75c3cb4bb6 | |||
bfd0f806f8 | |||
afab8a7846 | |||
afacdbc7a0 | |||
18a50b4dac | |||
fb69769991 | |||
750e7382c6 | |||
2464cc7a6d | |||
f078cbac4d | |||
aa545e5386 | |||
9711100ff1 | |||
8c49ee1b3b | |||
476aecf86d | |||
bd5d25429b | |||
4f2b68eef1 | |||
5f1ca15a7c | |||
e1002862a9 | |||
4ae2097cdc | |||
1f2ab71bb6 | |||
9c0956049a |
6 .github/dependabot.yml (vendored, new file)

@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "monthly"
16 .github/workflows/check-updated-changelog.yml (vendored, deleted)

@@ -1,16 +0,0 @@
name: Check if the CHANGELOG.md has been updated

on: [pull_request]

jobs:
  check:
    name: Test on ${{ matrix.os }}
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'ignore-changelog') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Checking the CHANGELOG.md has been updated in this PR
        run: |
          set -e
          git fetch origin ${{ github.base_ref }}
          git diff --name-only origin/${{ github.base_ref }} | grep -q CHANGELOG.md
34 .github/workflows/coverage.yml (vendored, new file)

@@ -0,0 +1,34 @@
---
on:
  pull_request:
    types: [review_requested, ready_for_review]

name: Execute code coverage

jobs:
  nightly-coverage:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
      - uses: actions-rs/cargo@v1
        with:
          command: clean
      - uses: actions-rs/cargo@v1
        with:
          command: test
          args: --all-features --no-fail-fast
        env:
          CARGO_INCREMENTAL: "0"
          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
      - uses: actions-rs/grcov@v0.1
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ${{ steps.coverage.outputs.report }}
          yml: ./codecov.yml
          fail_ci_if_error: true
5 .github/workflows/test.yml (vendored)

@@ -30,6 +30,11 @@ jobs:
        with:
          command: test
          args: --locked --release
      - name: Run cargo test dump
        uses: actions-rs/cargo@v1
        with:
          command: test
          args: dump --locked --release -- --ignored --test-threads 1
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
35 CHANGELOG.md

@@ -1,15 +1,48 @@
## v0.17.0

- Fix corrupted data during placeholder search (#1089)
- Remove maintenance error from http (#1082)
- Disable frontend in production (#1097)
- Update nbHits count with filtered documents (#849)
- Remove update changelog ci check (#1090)
- Add deploy on Platform.sh option to README (#1087)
- Change movie gifs in README (#1077)
- Remove some clippy warnings (#1100)
- Improve script `download-latest.sh` (#1054)
- Bump dependencies version (#1056, #1057, #1059)

## v0.16.0

- Automatically create index on document push if index doesn't exist (#914)
- Sort displayedAttributes and facetDistribution (#946)

## v0.15.0

- Update actix-web dependency to 3.0.0 (#963)
- Consider an empty query to be a placeholder search (#916)

## v0.14.1

- Fix version mismatch in snapshot importation (#959)

## v0.14.0

- Sort displayedAttributes (#943)
- Fix facet distribution case (#797)
- Snapshotting (#839)
- Fix bucket-sort unwrap bug (#915)

## v0.13.0

- placeholder search (#771)
- Add database version mismatch check (#794)
- Displayed and searchable attributes wildcard (#846)
- Remove sys-info route (#810)
- Fix facet distribution case (#797)
- Check database version mismatch (#794)
- Fix unique docid bug (#841)
- Error codes in updates (#792)
- Sentry disable argument (#813)
- Log analytics if enabled (#825)
- Fix default values displayed on web interface (#874)

## v0.12.0
112 CONTRIBUTING.md (new file)

@@ -0,0 +1,112 @@
# Contributing

First, thank you for contributing to MeiliSearch! The goal of this document is to provide everything you need to start contributing to MeiliSearch. The following TOC is sorted progressively, starting with the basics and expanding into more specifics.

<!-- MarkdownTOC autolink="true" style="ordered" indent="   " -->

1. [Assumptions](#assumptions)
1. [Your First Contribution](#your-first-contribution)
1. [Change Control](#change-control)
   1. [Git Branches](#git-branches)
   1. [Git Commits](#git-commits)
      1. [Style](#style)
   1. [Github Pull Requests](#github-pull-requests)
      1. [Reviews & Approvals](#reviews--approvals)
      1. [Merge Style](#merge-style)
   1. [CI](#ci)
1. [Development](#development)
   1. [Setup](#setup)
   1. [Testing](#testing)
   1. [Benchmarking](#benchmarking--profiling)
1. [Humans](#humans)
   1. [Documentation](#documentation)
   1. [Changelog](#changelog)

<!-- /MarkdownTOC -->

## Assumptions

1. **You're familiar with [Github](https://github.com) and the [pull request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) workflow.**
2. **You've read the MeiliSearch [docs](https://docs.meilisearch.com).**
3. **You know about the [MeiliSearch community](https://docs.meilisearch.com/resources/contact.html). Please use this for help.**

## Your First Contribution

1. Ensure your change has an issue! Find an [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
   * This is where you can get a feel for whether the change will be accepted or not.
2. Once approved, [fork the MeiliSearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository).
4. Review the MeiliSearch [workflow](#workflow) and [development](#development).
5. Make your changes.
6. [Submit the branch as a pull request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) to the main MeiliSearch repo. A MeiliSearch team member should comment and/or review your pull request within a few days, although, depending on the circumstances, it may take longer.

## Change Control

### Git Branches

_All_ changes must be made in a branch and submitted as [pull requests](#pull-requests). MeiliSearch does not adopt any type of branch naming style, but please use something descriptive of your changes.

### Git Commits

#### Style

Please ensure your commits are small and focused; they should tell a story of your change. This helps reviewers follow your changes, especially for more complex changes.

Familiarise yourself with [How to Write a Git Commit Message](https://chris.beams.io/posts/git-commit/).

### Github Pull Requests

Once your changes are ready, you must submit your branch as a pull request.

#### Reviews & Approvals

All pull requests must be reviewed and approved by at least one MeiliSearch team member.

#### Merge Style

All pull requests are squashed and merged. We generally discourage large pull requests that are over 300-500 lines of diff. If you would like to propose a change that is larger, we suggest coming onto our chat channel and discussing it with one of our engineers. This way we can talk through the solution and discuss whether a change that large is even needed! Overall this will produce a quicker response to the change and likely produce code that aligns better with our process.

## Development

### Setup

See the [MeiliSearch Docs](https://docs.meilisearch.com/guides/advanced_guides/installation.html) for how to set up a development environment.

### Benchmarking & Profiling

We do not yet do any benchmarking, nor have we formalised our profiling. If you'd like to work on this, please get in touch!

## Humans

After making your change, you'll want to prepare it for MeiliSearch users (mostly humans). This usually entails updating documentation and announcing your feature.

### Documentation

Documentation is very important to MeiliSearch. All contributions that alter user-facing behavior MUST include documentation changes. Please see [GitHub.com/meilisearch/documentation](https://github.com/meilisearch/documentation) for more info.

### Changelog

Until we have guidelines in place, updating the [`Changelog`](/CHANGELOG.md) is solely the responsibility of MeiliSearch team members.
1368 Cargo.lock (generated)
File diff suppressed because it is too large
71 README.md
@ -2,7 +2,6 @@
|
||||
<img src="assets/logo.svg" alt="MeiliSearch" width="200" height="200" />
|
||||
</p>
|
||||
|
||||
|
||||
<h1 align="center">MeiliSearch</h1>
|
||||
|
||||
<h4 align="center">
|
||||
@ -29,45 +28,53 @@
|
||||
For more information about features go to [our documentation](https://docs.meilisearch.com/).
|
||||
|
||||
<p align="center">
|
||||
<a href="https://crates.meilisearch.com"><img src="assets/crates-io-demo.gif" alt="crates.io demo gif" /></a>
|
||||
<img src="assets/trumen_quick_loop.gif" alt="Web interface gif" />
|
||||
</p>
|
||||
|
||||
> MeiliSearch helps the Rust community find crates on [crates.meilisearch.com](https://crates.meilisearch.com)
|
||||
|
||||
## Features
|
||||
## ✨ Features
|
||||
* Search as-you-type experience (answers < 50 milliseconds)
|
||||
* Full-text search
|
||||
* Typo tolerant (understands typos and miss-spelling)
|
||||
* Faceted search and filters
|
||||
* Supports Kanji characters
|
||||
* Supports Synonym
|
||||
* Easy to install, deploy, and maintain
|
||||
* Whole documents are returned
|
||||
* Highly customizable
|
||||
* RESTful API
|
||||
* Faceted search and filtering
|
||||
|
||||
## Get started
|
||||
## Getting started
|
||||
|
||||
### Deploy the Server
|
||||
|
||||
#### Run it using Digital Ocean
|
||||
|
||||
[](https://marketplace.digitalocean.com/apps/meilisearch?action=deploy&refcode=7c67bd97e101)
|
||||
|
||||
#### Run it using Docker
|
||||
|
||||
```bash
|
||||
docker run -p 7700:7700 -v $(pwd)/data.ms:/data.ms getmeili/meilisearch
|
||||
```
|
||||
|
||||
#### Installing with Homebrew
|
||||
#### Brew (Mac OS)
|
||||
|
||||
```bash
|
||||
brew update && brew install meilisearch
|
||||
meilisearch
|
||||
```
|
||||
|
||||
#### Installing with APT
|
||||
#### Docker
|
||||
|
||||
```bash
|
||||
docker run -p 7700:7700 -v $(pwd)/data.ms:/data.ms getmeili/meilisearch
|
||||
```
|
||||
|
||||
#### Try MeiliSearch in our Sandbox
|
||||
|
||||
Create a MeiliSearch instance in [MeiliSearch Sandbox](https://sandbox.meilisearch.com/). This instance is free, and will be active for 72 hours.
|
||||
|
||||
#### Run on Digital Ocean
|
||||
|
||||
[](https://marketplace.digitalocean.com/apps/meilisearch?action=deploy&refcode=7c67bd97e101)
|
||||
|
||||
#### Deploy on Platform.sh
|
||||
|
||||
<a href="https://console.platform.sh/projects/create-project?template=https://raw.githubusercontent.com/platformsh/template-builder/master/templates/meilisearch/.platform.template.yaml&utm_content=meilisearch&utm_source=github&utm_medium=button&utm_campaign=deploy_on_platform">
|
||||
<img src="https://platform.sh/images/deploy/lg-blue.svg" alt="Deploy on Platform.sh" width="180px" />
|
||||
</a>
|
||||
|
||||
#### APT (Debian & Ubuntu)
|
||||
|
||||
```bash
|
||||
echo "deb [trusted=yes] https://apt.fury.io/meilisearch/ /" > /etc/apt/sources.list.d/fury.list
|
||||
@ -75,7 +82,7 @@ apt update && apt install meilisearch-http
|
||||
meilisearch
|
||||
```
|
||||
|
||||
#### Download the binary
|
||||
#### Download the binary (Linux & Mac OS)
|
||||
|
||||
```bash
|
||||
curl -L https://install.meilisearch.com | sh
|
||||
@ -84,7 +91,7 @@ curl -L https://install.meilisearch.com | sh
|
||||
|
||||
#### Compile and run it from sources
|
||||
|
||||
If you have the Rust toolchain already installed on your local system, clone the repository and change it to your working directory.
|
||||
If you have the latest stable Rust toolchain installed on your local system, clone the repository and change it to your working directory.
|
||||
|
||||
```bash
|
||||
git clone https://github.com/meilisearch/MeiliSearch.git
|
||||
@ -165,33 +172,31 @@ We also deliver an **out-of-the-box web interface** in which you can test MeiliS
|
||||
|
||||
You can access the web interface in your web browser at the root of the server. The default URL is [http://127.0.0.1:7700](http://127.0.0.1:7700). All you need to do is open your web browser and enter MeiliSearch’s address to visit it. This will lead you to a web page with a search bar that will allow you to search in the selected index.
|
||||
|
||||
<p align="center">
|
||||
<img src="assets/movies-web-demo.gif" alt="Web interface gif" />
|
||||
</p>
|
||||
| [See the gif above](#demo)
|
||||
|
||||
### Documentation
|
||||
## Documentation
|
||||
|
||||
Now that your MeiliSearch server is up and running, you can learn more about how to tune your search engine in [the documentation](https://docs.meilisearch.com).
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
Hey! We're glad you're thinking about contributing to MeiliSearch! If you think something is missing or could be improved, please open issues and pull requests. If you'd like to help this project grow, we'd love to have you! To start contributing, checking [issues tagged as "good-first-issue"](https://github.com/meilisearch/MeiliSearch/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) is a good start!
|
||||
|
||||
### Analytic Events
|
||||
## Telemetry
|
||||
|
||||
Every hour, events are being sent to our Amplitude instance so we can know how many people are using MeiliSearch.<br/>
|
||||
MeiliSearch collects anonymous data regarding general usage.
|
||||
This helps us better understand developers usage of MeiliSearch features.<br/>
|
||||
To see what information we're retrieving, please see the complete list [on the dedicated issue](https://github.com/meilisearch/MeiliSearch/issues/720).<br/>
|
||||
We also use Sentry to make us crash and error reports. If you want to know more about what Sentry collects, please visit their [privacy policy website](https://sentry.io/privacy/).<br/>
|
||||
If this doesn't suit you, you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.
|
||||
This program is optional, you can disable these analytics by using the `MEILI_NO_ANALYTICS` env variable.
|
||||
|
||||
## Contact
|
||||
## 💌 Contact
|
||||
|
||||
Feel free to contact us about any questions you may have:
|
||||
* At [bonjour@meilisearch.com](mailto:bonjour@meilisearch.com): English or French is welcome! 🇬🇧 🇫🇷
|
||||
* At [bonjour@meilisearch.com](mailto:bonjour@meilisearch.com)
|
||||
* Via the chat box available on every page of [our documentation](https://docs.meilisearch.com/) and on [our landing page](https://www.meilisearch.com/).
|
||||
* 🆕 Join our [GitHub Discussions forum](https://github.com/meilisearch/MeiliSearch/discussions) (BETA hype!)
|
||||
* 🆕 Join our [GitHub Discussions forum](https://github.com/meilisearch/MeiliSearch/discussions)
|
||||
* Join our [Slack community](https://slack.meilisearch.com/).
|
||||
* By opening an issue.
|
||||
|
||||
Any suggestion or feedback is highly appreciated. Thank you for your support!
|
||||
MeiliSearch is developed by [Meili](https://www.meilisearch.com), a young company. To know more about us, you can [read our blog](https://blog.meilisearch.com). Any suggestion or feedback is highly appreciated. Thank you for your support!
|
||||
|
Binary file not shown (before: 5.3 MiB)
BIN assets/trumen_quick_loop.gif (new file)
Binary file not shown (after: 1.2 MiB)
@@ -1 +1 @@
_datas in movies.csv are from https://www.themoviedb.org/_
_datas in movies.json are from https://www.themoviedb.org/_
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,10 +0,0 @@
{
  "searchableAttributes": ["title", "overview"],
  "displayedAttributes": [
    "id",
    "title",
    "overview",
    "release_date",
    "poster"
  ]
}
@ -71,7 +71,7 @@ semverLT() {
|
||||
# Returns the tag of the latest stable release (in terms of semver and not of release date)
|
||||
get_latest() {
|
||||
temp_file='temp_file' # temp_file needed because the grep would start before the download is over
|
||||
curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file"
|
||||
curl -s 'https://api.github.com/repos/meilisearch/MeiliSearch/releases' > "$temp_file" || return 1
|
||||
releases=$(cat "$temp_file" | \
|
||||
grep -E "tag_name|draft|prerelease" \
|
||||
| tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
|
||||
@ -168,16 +168,17 @@ failure_usage() {
|
||||
|
||||
# MAIN
|
||||
latest="$(get_latest)"
|
||||
get_os
|
||||
if [ "$?" -eq 1 ]; then
|
||||
|
||||
if ! get_os; then
|
||||
failure_usage
|
||||
exit 1
|
||||
fi
|
||||
get_archi
|
||||
if [ "$?" -eq 1 ]; then
|
||||
|
||||
if ! get_archi; then
|
||||
failure_usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Downloading MeiliSearch binary $latest for $os, architecture $archi..."
|
||||
release_file="meilisearch-$os-$archi"
|
||||
link="https://github.com/meilisearch/MeiliSearch/releases/download/$latest/$release_file"
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "meilisearch-core"
|
||||
version = "0.13.0"
|
||||
version = "0.17.0"
|
||||
license = "MIT"
|
||||
authors = ["Kerollmops <clement@meilisearch.com>"]
|
||||
edition = "2018"
|
||||
@ -9,43 +9,43 @@ edition = "2018"
|
||||
arc-swap = "0.4.5"
|
||||
bincode = "1.2.1"
|
||||
byteorder = "1.3.4"
|
||||
chrono = { version = "0.4.11", features = ["serde"] }
|
||||
chrono = { version = "0.4.19", features = ["serde"] }
|
||||
compact_arena = "0.4.0"
|
||||
cow-utils = "0.1.2"
|
||||
crossbeam-channel = "0.4.2"
|
||||
crossbeam-channel = "0.5.0"
|
||||
deunicode = "1.1.0"
|
||||
either = "1.5.3"
|
||||
env_logger = "0.7.1"
|
||||
fst = "0.4.4"
|
||||
hashbrown = { version = "0.7.1", features = ["serde"] }
|
||||
hashbrown = { version = "0.9.0", features = ["serde"] }
|
||||
heed = "0.8.0"
|
||||
indexmap = { version = "1.3.2", features = ["serde-1"] }
|
||||
intervaltree = "0.2.5"
|
||||
itertools = "0.9.0"
|
||||
levenshtein_automata = { version = "0.2.0", features = ["fst_automaton"] }
|
||||
log = "0.4.8"
|
||||
meilisearch-error = { path = "../meilisearch-error", version = "0.13.0" }
|
||||
meilisearch-schema = { path = "../meilisearch-schema", version = "0.13.0" }
|
||||
meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.13.0" }
|
||||
meilisearch-types = { path = "../meilisearch-types", version = "0.13.0" }
|
||||
meilisearch-error = { path = "../meilisearch-error", version = "0.17.0" }
|
||||
meilisearch-schema = { path = "../meilisearch-schema", version = "0.17.0" }
|
||||
meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.17.0" }
|
||||
meilisearch-types = { path = "../meilisearch-types", version = "0.17.0" }
|
||||
once_cell = "1.3.1"
|
||||
ordered-float = { version = "1.0.2", features = ["serde"] }
|
||||
pest = { git = "https://github.com/MarinPostma/pest.git", tag = "meilisearch-patch1" }
|
||||
ordered-float = { version = "2.0.0", features = ["serde"] }
|
||||
pest = { git = "https://github.com/pest-parser/pest.git", rev = "51fd1d49f1041f7839975664ef71fe15c7dcaf67" }
|
||||
pest_derive = "2.0"
|
||||
regex = "1.3.6"
|
||||
regex = "1.4.1"
|
||||
sdset = "0.4.0"
|
||||
serde = { version = "1.0.105", features = ["derive"] }
|
||||
serde_json = { version = "1.0.50", features = ["preserve_order"] }
|
||||
serde = { version = "1.0.117", features = ["derive"] }
|
||||
serde_json = { version = "1.0.59", features = ["preserve_order"] }
|
||||
slice-group-by = "0.2.6"
|
||||
unicase = "2.6.0"
|
||||
zerocopy = "0.3.0"
|
||||
|
||||
[dev-dependencies]
|
||||
assert_matches = "1.3.0"
|
||||
assert_matches = "1.4.0"
|
||||
criterion = "0.3.1"
|
||||
csv = "1.1.3"
|
||||
rustyline = { version = "6.0.0", default-features = false }
|
||||
structopt = "0.3.12"
|
||||
structopt = "0.3.20"
|
||||
tempfile = "3.1.0"
|
||||
termcolor = "1.1.0"
|
||||
|
||||
|
@ -98,7 +98,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
|
||||
let bench_name = BenchmarkId::from_parameter(format!("{:?}", query));
|
||||
group.bench_with_input(bench_name, &query, |b, query| b.iter(|| {
|
||||
let builder = index.query_builder();
|
||||
builder.query(&reader, query, 0..20).unwrap();
|
||||
builder.query(&reader, Some(*query), 0..20).unwrap();
|
||||
}));
|
||||
}
|
||||
group.finish();
|
||||
|
@ -349,8 +349,8 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<
|
||||
|
||||
if let Some(ref filter) = command.filter {
|
||||
let filter = filter.as_str();
|
||||
let (positive, filter) = if filter.chars().next() == Some('!') {
|
||||
(false, &filter[1..])
|
||||
let (positive, filter) = if let Some(stripped) = filter.strip_prefix('!') {
|
||||
(false, stripped)
|
||||
} else {
|
||||
(true, filter)
|
||||
};
|
||||
|
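The hunk above replaces the manual `chars().next() == Some('!')` check and slice with `str::strip_prefix`, which hands back the remainder only when the prefix is actually present. A minimal, standalone sketch of the same negation-parsing idea (the `parse_filter` helper is illustrative, not a name from the codebase):

```rust
/// Splits an optional leading '!' off a filter expression.
/// Returns (is_positive, remaining_filter).
fn parse_filter(filter: &str) -> (bool, &str) {
    if let Some(stripped) = filter.strip_prefix('!') {
        (false, stripped) // negated filter: drop the '!' without manual indexing
    } else {
        (true, filter)
    }
}

fn main() {
    assert_eq!(parse_filter("!genre:horror"), (false, "genre:horror"));
    assert_eq!(parse_filter("genre:drama"), (true, "genre:drama"));
    println!("strip_prefix-based parsing works");
}
```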
@ -9,7 +9,7 @@ use std::time::Instant;
|
||||
use std::fmt;
|
||||
|
||||
use compact_arena::{SmallArena, Idx32, mk_arena};
|
||||
use log::debug;
|
||||
use log::{debug, error};
|
||||
use sdset::{Set, SetBuf, exponential_search, SetOperation, Counter, duo::OpBuilder};
|
||||
use slice_group_by::{GroupBy, GroupByMut};
|
||||
|
||||
@ -39,7 +39,7 @@ pub fn bucket_sort<'c, FI>(
|
||||
query: &str,
|
||||
range: Range<usize>,
|
||||
facets_docids: Option<SetBuf<DocumentId>>,
|
||||
facet_count_docids: Option<HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>>,
|
||||
facet_count_docids: Option<HashMap<String, HashMap<String, (&str, Cow<Set<DocumentId>>)>>>,
|
||||
filter: Option<FI>,
|
||||
criteria: Criteria<'c>,
|
||||
searchable_attrs: Option<ReorderedAttrs>,
|
||||
@ -199,7 +199,7 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
|
||||
query: &str,
|
||||
range: Range<usize>,
|
||||
facets_docids: Option<SetBuf<DocumentId>>,
|
||||
facet_count_docids: Option<HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>>,
|
||||
facet_count_docids: Option<HashMap<String, HashMap<String, (&str, Cow<Set<DocumentId>>)>>>,
|
||||
filter: Option<FI>,
|
||||
distinct: FD,
|
||||
distinct_size: usize,
|
||||
@ -212,6 +212,7 @@ where
|
||||
FD: Fn(DocumentId) -> Option<u64>,
|
||||
{
|
||||
let mut result = SortResult::default();
|
||||
let mut filtered_count = 0;
|
||||
|
||||
let words_set = index.main.words_fst(reader)?;
|
||||
let stop_words = index.main.stop_words_fst(reader)?;
|
||||
@ -322,19 +323,36 @@ where
|
||||
let filter_accepted = match &filter {
|
||||
Some(filter) => {
|
||||
let entry = filter_map.entry(document.id);
|
||||
*entry.or_insert_with(|| (filter)(document.id))
|
||||
*entry.or_insert_with(|| {
|
||||
let accepted = (filter)(document.id);
|
||||
// we only want to count it as filtered out the first time we see it
|
||||
if !accepted {
|
||||
filtered_count += 1;
|
||||
}
|
||||
accepted
|
||||
})
|
||||
}
|
||||
None => true,
|
||||
};
|
||||
|
||||
if filter_accepted {
|
||||
let entry = key_cache.entry(document.id);
|
||||
let key = entry.or_insert_with(|| (distinct)(document.id).map(Rc::new));
|
||||
let mut seen = true;
|
||||
let key = entry.or_insert_with(|| {
|
||||
seen = false;
|
||||
(distinct)(document.id).map(Rc::new)
|
||||
});
|
||||
|
||||
match key.clone() {
|
||||
let distinct = match key.clone() {
|
||||
Some(key) => buf_distinct.register(key),
|
||||
None => buf_distinct.register_without_key(),
|
||||
};
|
||||
|
||||
// we only want to count the document if it is the first time we see it and
|
||||
// if it wasn't accepted by distinct
|
||||
if !seen && !distinct {
|
||||
filtered_count += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// the requested range end is reached: stop computing distinct
|
||||
@ -370,12 +388,18 @@ where
|
||||
let mut documents = Vec::with_capacity(range.len());
|
||||
for raw_document in raw_documents.into_iter().skip(distinct_raw_offset) {
|
||||
let filter_accepted = match &filter {
|
||||
Some(_) => filter_map.remove(&raw_document.id).unwrap(),
|
||||
Some(_) => filter_map.remove(&raw_document.id).unwrap_or_else(|| {
|
||||
error!("error during filtering: expected value for document id {}", &raw_document.id.0);
|
||||
Default::default()
|
||||
}),
|
||||
None => true,
|
||||
};
|
||||
|
||||
if filter_accepted {
|
||||
let key = key_cache.remove(&raw_document.id).unwrap();
|
||||
let key = key_cache.remove(&raw_document.id).unwrap_or_else(|| {
|
||||
error!("error during distinct: expected value for document id {}", &raw_document.id.0);
|
||||
Default::default()
|
||||
});
|
||||
let distinct_accepted = match key {
|
||||
Some(key) => seen.register(key),
|
||||
None => seen.register_without_key(),
|
||||
@ -390,7 +414,7 @@ where
|
||||
}
|
||||
}
|
||||
result.documents = documents;
|
||||
result.nb_hits = docids.len();
|
||||
result.nb_hits = docids.len() - filtered_count;
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
@ -637,17 +661,17 @@ pub fn placeholder_document_sort(
|
||||
|
||||
/// For each entry in facet_docids, calculates the number of documents in the intersection with candidate_docids.
|
||||
pub fn facet_count(
|
||||
facet_docids: HashMap<String, HashMap<String, Cow<Set<DocumentId>>>>,
|
||||
facet_docids: HashMap<String, HashMap<String, (&str, Cow<Set<DocumentId>>)>>,
|
||||
candidate_docids: &Set<DocumentId>,
|
||||
) -> HashMap<String, HashMap<String, usize>> {
|
||||
let mut facets_counts = HashMap::with_capacity(facet_docids.len());
|
||||
for (key, doc_map) in facet_docids {
|
||||
let mut count_map = HashMap::with_capacity(doc_map.len());
|
||||
for (value, docids) in doc_map {
|
||||
for (_, (value, docids)) in doc_map {
|
||||
let mut counter = Counter::new();
|
||||
let op = OpBuilder::new(docids.as_ref(), candidate_docids).intersection();
|
||||
SetOperation::<DocumentId>::extend_collection(op, &mut counter);
|
||||
count_map.insert(value, counter.0);
|
||||
count_map.insert(value.to_string(), counter.0);
|
||||
}
|
||||
facets_counts.insert(key, count_map);
|
||||
}
|
||||
|
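The `bucket_sort_with_distinct` changes above are what make `nbHits` reflect filtered documents (#849): the filter verdict is cached per document id, and the rejected counter is bumped only inside the `or_insert_with` closure, i.e. the first time a given id is evaluated. A self-contained sketch of that cache-and-count-once pattern, with plain `u64` ids standing in for the crate's `DocumentId`:

```rust
use std::collections::HashMap;

fn main() {
    // Candidate document ids, possibly with repeats: the same document can be
    // reached through several query branches.
    let candidates = [1u64, 2, 3, 2, 4, 3, 5];
    let filter = |id: u64| id % 2 == 1; // keep odd ids only

    let mut filter_map: HashMap<u64, bool> = HashMap::new();
    let mut filtered_count = 0usize;

    for &id in &candidates {
        let accepted = *filter_map.entry(id).or_insert_with(|| {
            let accepted = filter(id);
            if !accepted {
                // Count the rejection only on the first evaluation,
                // so duplicate candidates do not inflate the count.
                filtered_count += 1;
            }
            accepted
        });
        if accepted {
            // ...the document would continue to the distinct/bucket-sort stage here.
        }
    }

    let nb_candidates = filter_map.len();
    println!(
        "nb_hits = {} ({} distinct candidates, {} filtered out)",
        nb_candidates - filtered_count,
        nb_candidates,
        filtered_count
    );
}
```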
@ -1,4 +1,5 @@
|
||||
use std::collections::hash_map::{Entry, HashMap};
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, RwLock};
|
||||
@ -27,7 +28,6 @@ pub type MainReader = heed::RoTxn<MainT>;
|
||||
pub type UpdateWriter<'a> = heed::RwTxn<'a, UpdateT>;
|
||||
pub type UpdateReader = heed::RoTxn<UpdateT>;
|
||||
|
||||
const UNHEALTHY_KEY: &str = "_is_unhealthy";
|
||||
const LAST_UPDATE_KEY: &str = "last-update";
|
||||
|
||||
pub struct MainT;
|
||||
@ -40,6 +40,7 @@ pub struct Database {
|
||||
indexes_store: heed::Database<Str, Unit>,
|
||||
indexes: RwLock<HashMap<String, (Index, thread::JoinHandle<MResult<()>>)>>,
|
||||
update_fn: Arc<ArcSwapFn>,
|
||||
database_version: (u32, u32, u32),
|
||||
}
|
||||
|
||||
pub struct DatabaseOptions {
|
||||
@ -165,7 +166,7 @@ fn update_awaiter(
|
||||
|
||||
/// Ensures the Meilisearch version is compatible with the database; returns an error on version mismatch.
|
||||
/// If create is set to true, a VERSION file is created with the current version.
|
||||
fn version_guard(path: &Path, create: bool) -> MResult<()> {
|
||||
fn version_guard(path: &Path, create: bool) -> MResult<(u32, u32, u32)> {
|
||||
let current_version_major = env!("CARGO_PKG_VERSION_MAJOR");
|
||||
let current_version_minor = env!("CARGO_PKG_VERSION_MINOR");
|
||||
let current_version_patch = env!("CARGO_PKG_VERSION_PATCH");
|
||||
@ -182,13 +183,20 @@ fn version_guard(path: &Path, create: bool) -> MResult<()> {
|
||||
let version = re
|
||||
.captures_iter(&version)
|
||||
.next()
|
||||
.ok_or(Error::VersionMismatch("bad VERSION file".to_string()))?;
|
||||
.ok_or_else(|| Error::VersionMismatch("bad VERSION file".to_string()))?;
|
||||
// the first is always the complete match, safe to unwrap because we have a match
|
||||
let version_major = version.get(1).unwrap().as_str();
|
||||
let version_minor = version.get(2).unwrap().as_str();
|
||||
let version_patch = version.get(3).unwrap().as_str();
|
||||
|
||||
if version_major != current_version_major || version_minor != current_version_minor {
|
||||
return Err(Error::VersionMismatch(format!("{}.{}.XX", version_major, version_minor)));
|
||||
Err(Error::VersionMismatch(format!("{}.{}.XX", version_major, version_minor)))
|
||||
} else {
|
||||
Ok((
|
||||
version_major.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
|
||||
version_minor.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
|
||||
version_patch.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?
|
||||
))
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
@ -202,17 +210,22 @@ fn version_guard(path: &Path, create: bool) -> MResult<()> {
|
||||
current_version_major,
|
||||
current_version_minor,
|
||||
current_version_patch).as_bytes())?;
|
||||
|
||||
Ok((
|
||||
current_version_major.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
|
||||
current_version_minor.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?,
|
||||
current_version_patch.parse().map_err(|e| Error::VersionMismatch(format!("error parsing database version: {}", e)))?
|
||||
))
|
||||
} else {
|
||||
// when no version file is found and we were not told to create one, this
|
||||
// means that the version is inferior to the one this feature was added in.
|
||||
return Err(Error::VersionMismatch(format!("<0.12.0")));
|
||||
Err(Error::VersionMismatch("<0.12.0".to_string()))
|
||||
}
|
||||
}
|
||||
_ => return Err(error.into())
|
||||
_ => Err(error.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl Database {
|
||||
@ -224,7 +237,7 @@ impl Database {
|
||||
fs::create_dir_all(&path)?;
|
||||
|
||||
// create file only if main db wasn't created before (first run)
|
||||
version_guard(path.as_ref(), !main_path.exists() && !update_path.exists())?;
|
||||
let database_version = version_guard(path.as_ref(), !main_path.exists() && !update_path.exists())?;
|
||||
|
||||
fs::create_dir_all(&main_path)?;
|
||||
let env = heed::EnvOpenOptions::new()
|
||||
@ -302,6 +315,7 @@ impl Database {
|
||||
indexes_store,
|
||||
indexes: RwLock::new(indexes),
|
||||
update_fn,
|
||||
database_version,
|
||||
})
|
||||
}
|
||||
|
||||
@ -469,10 +483,19 @@ impl Database {
|
||||
|
||||
let env_path = path.join("main");
|
||||
let env_update_path = path.join("update");
|
||||
let env_version_path = path.join("VERSION");
|
||||
|
||||
fs::create_dir(&env_path)?;
|
||||
fs::create_dir(&env_update_path)?;
|
||||
|
||||
// write Database Version
|
||||
let (current_version_major, current_version_minor, current_version_patch) = self.database_version;
|
||||
let mut version_file = File::create(&env_version_path)?;
|
||||
version_file.write_all(format!("{}.{}.{}",
|
||||
current_version_major,
|
||||
current_version_minor,
|
||||
current_version_patch).as_bytes())?;
|
||||
|
||||
let env_path = env_path.join("data.mdb");
|
||||
let env_file = self.env.copy_to_path(&env_path, CompactionOption::Enabled)?;
|
||||
|
||||
@ -509,23 +532,6 @@ impl Database {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_healthy(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
|
||||
let common_store = self.common_store();
|
||||
common_store.delete::<_, Str>(writer, UNHEALTHY_KEY)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_unhealthy(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
|
||||
let common_store = self.common_store();
|
||||
common_store.put::<_, Str, Unit>(writer, UNHEALTHY_KEY, &())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_health(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<()>> {
|
||||
let common_store = self.common_store();
|
||||
Ok(common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY)?)
|
||||
}
|
||||
|
||||
pub fn compute_stats(&self, writer: &mut MainWriter, index_uid: &str) -> MResult<()> {
|
||||
let index = match self.open_index(&index_uid) {
|
||||
Some(index) => index,
|
||||
@ -554,7 +560,7 @@ impl Database {
|
||||
}
|
||||
|
||||
// convert attributes to their names
|
||||
let frequency: HashMap<_, _> = fields_frequency
|
||||
let frequency: BTreeMap<_, _> = fields_frequency
|
||||
.into_iter()
|
||||
.filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c)))
|
||||
.collect();
|
||||
@ -563,6 +569,8 @@ impl Database {
|
||||
.main
|
||||
.put_fields_distribution(writer, &frequency)
|
||||
}
|
||||
|
||||
pub fn version(&self) -> (u32, u32, u32) { self.database_version }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
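With this change `version_guard` returns the parsed `(major, minor, patch)` triple instead of `()`, so the `Database` can remember which version created the data directory and write the same `VERSION` file into dumps and snapshots. A rough standalone sketch of the parse-and-compare step under the same major/minor compatibility rule (helper names are illustrative only):

```rust
/// Parses a "MAJOR.MINOR.PATCH" string into a numeric triple.
fn parse_version(s: &str) -> Result<(u32, u32, u32), String> {
    let mut parts = s.trim().splitn(3, '.');
    let mut next = |what: &str| -> Result<u32, String> {
        parts
            .next()
            .ok_or_else(|| format!("missing {} in version {:?}", what, s))?
            .parse()
            .map_err(|e| format!("error parsing database version: {}", e))
    };
    Ok((next("major")?, next("minor")?, next("patch")?))
}

/// Mirrors the rule used above: only major and minor have to match.
fn is_compatible(on_disk: (u32, u32, u32), current: (u32, u32, u32)) -> bool {
    on_disk.0 == current.0 && on_disk.1 == current.1
}

fn main() -> Result<(), String> {
    let current = parse_version("0.17.3")?;
    assert!(is_compatible(parse_version("0.17.0")?, current));
    assert!(!is_compatible(parse_version("0.16.1")?, current));
    println!("major.minor compatibility check passed");
    Ok(())
}
```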
@ -164,7 +164,7 @@ impl<'a> heed::BytesDecode<'a> for FacetKey {
|
||||
}
|
||||
|
||||
pub fn add_to_facet_map(
|
||||
facet_map: &mut HashMap<FacetKey, Vec<DocumentId>>,
|
||||
facet_map: &mut HashMap<FacetKey, (String, Vec<DocumentId>)>,
|
||||
field_id: FieldId,
|
||||
value: Value,
|
||||
document_id: DocumentId,
|
||||
@ -175,8 +175,8 @@ pub fn add_to_facet_map(
|
||||
Value::Null => return Ok(()),
|
||||
value => return Err(FacetError::InvalidDocumentAttribute(value.to_string())),
|
||||
};
|
||||
let key = FacetKey::new(field_id, value);
|
||||
facet_map.entry(key).or_insert_with(Vec::new).push(document_id);
|
||||
let key = FacetKey::new(field_id, value.clone());
|
||||
facet_map.entry(key).or_insert_with(|| (value, Vec::new())).1.push(document_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -185,8 +185,10 @@ pub fn facet_map_from_docids(
|
||||
index: &crate::Index,
|
||||
document_ids: &[DocumentId],
|
||||
attributes_for_facetting: &[FieldId],
|
||||
) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
|
||||
let mut facet_map = HashMap::new();
|
||||
) -> MResult<HashMap<FacetKey, (String, Vec<DocumentId>)>> {
|
||||
// A hashmap that associates a facet key with a pair containing the original facet attribute
|
||||
// string with its case preserved, and a list of document ids for that facet attribute.
|
||||
let mut facet_map: HashMap<FacetKey, (String, Vec<DocumentId>)> = HashMap::new();
|
||||
for document_id in document_ids {
|
||||
for result in index
|
||||
.documents_fields
|
||||
@ -212,7 +214,7 @@ pub fn facet_map_from_docs(
|
||||
schema: &Schema,
|
||||
documents: &HashMap<DocumentId, IndexMap<String, Value>>,
|
||||
attributes_for_facetting: &[FieldId],
|
||||
) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
|
||||
) -> MResult<HashMap<FacetKey, (String, Vec<DocumentId>)>> {
|
||||
let mut facet_map = HashMap::new();
|
||||
let attributes_for_facetting = attributes_for_facetting
|
||||
.iter()
|
||||
|
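`add_to_facet_map` now keeps the original, case-preserved facet string next to the document-id list, while the normalized `FacetKey` remains the lookup key; this is what lets the facet distribution report values with their original casing. A small sketch of that map shape, using plain strings and `u64` ids in place of the crate's types:

```rust
use std::collections::HashMap;

/// key: normalized facet value, value: (original string, matching document ids).
fn add(map: &mut HashMap<String, (String, Vec<u64>)>, value: &str, doc_id: u64) {
    let key = value.to_lowercase(); // normalized form used as the lookup key
    map.entry(key)
        .or_insert_with(|| (value.to_string(), Vec::new())) // keep the original casing
        .1
        .push(doc_id);
}

fn main() {
    let mut facet_map: HashMap<String, (String, Vec<u64>)> = HashMap::new();

    add(&mut facet_map, "Sci-Fi", 1);
    add(&mut facet_map, "sci-fi", 2); // same facet, different casing
    add(&mut facet_map, "Drama", 3);

    // The distribution can now be reported with the casing of the first occurrence.
    for (key, (original, ids)) in &facet_map {
        println!("{} (displayed as {:?}): {} document(s)", key, original, ids.len());
    }
}
```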
@ -97,16 +97,14 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
|
||||
.unwrap_or_default();
|
||||
ors.push(docids);
|
||||
}
|
||||
let sets: Vec<_> = ors.iter().map(Cow::deref).collect();
|
||||
let or_result = sdset::multi::OpBuilder::from_vec(sets)
|
||||
.union()
|
||||
.into_set_buf();
|
||||
let sets: Vec<_> = ors.iter().map(|(_, i)| i).map(Cow::deref).collect();
|
||||
let or_result = sdset::multi::OpBuilder::from_vec(sets).union().into_set_buf();
|
||||
ands.push(Cow::Owned(or_result));
|
||||
ors.clear();
|
||||
}
|
||||
Either::Right(key) => {
|
||||
match self.index.facets.facet_document_ids(reader, &key)? {
|
||||
Some(docids) => ands.push(docids),
|
||||
Some((_name, docids)) => ands.push(docids),
|
||||
// no candidates for search, early return.
|
||||
None => return Ok(Some(SetBuf::default())),
|
||||
}
|
||||
@ -206,7 +204,7 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
|
||||
}
|
||||
}
|
||||
|
||||
fn facet_count_docids<'a>(&self, reader: &'a MainReader) -> MResult<Option<HashMap<String, HashMap<String, Cow<'a, Set<DocumentId>>>>>> {
|
||||
fn facet_count_docids<'a>(&self, reader: &'a MainReader) -> MResult<Option<HashMap<String, HashMap<String, (&'a str, Cow<'a, Set<DocumentId>>)>>>> {
|
||||
match self.facets {
|
||||
Some(ref field_ids) => {
|
||||
let mut facet_count_map = HashMap::new();
|
||||
@ -227,10 +225,17 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
|
||||
|
||||
fn sort_result_from_docids(&self, docids: &[DocumentId], range: Range<usize>) -> SortResult {
|
||||
let mut sort_result = SortResult::default();
|
||||
let mut filtered_count = 0;
|
||||
let mut result = match self.filter {
|
||||
Some(ref filter) => docids
|
||||
.iter()
|
||||
.filter(|item| (filter)(**item))
|
||||
.filter(|item| {
|
||||
let accepted = (filter)(**item);
|
||||
if !accepted {
|
||||
filtered_count += 1;
|
||||
}
|
||||
accepted
|
||||
})
|
||||
.skip(range.start)
|
||||
.take(range.end - range.start)
|
||||
.map(|&id| Document::from_highlights(id, &[]))
|
||||
@ -250,15 +255,19 @@ impl<'c, 'f, 'd, 'i> QueryBuilder<'c, 'f, 'd, 'i> {
|
||||
result.retain(|doc| {
|
||||
let id = doc.id;
|
||||
let key = (distinct)(id);
|
||||
match key {
|
||||
let distinct_accepted = match key {
|
||||
Some(key) => distinct_map.register(key),
|
||||
None => distinct_map.register_without_key(),
|
||||
};
|
||||
if !distinct_accepted {
|
||||
filtered_count += 1;
|
||||
}
|
||||
distinct_accepted
|
||||
});
|
||||
}
|
||||
|
||||
sort_result.documents = result;
|
||||
sort_result.nb_hits = docids.len();
|
||||
sort_result.nb_hits = docids.len() - filtered_count;
|
||||
sort_result
|
||||
}
|
||||
|
||||
|
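`sort_result_from_docids` applies the same `nbHits` correction as the bucket-sort path: rejections are counted as a side effect of the `filter` step and subtracted from the candidate count. A reduced sketch of that iterator pattern:

```rust
fn main() {
    let docids: Vec<u64> = (0..10).collect();
    let filter = |id: u64| id % 3 != 0; // drop every third document

    let mut filtered_count = 0usize;
    let accepted: Vec<u64> = docids
        .iter()
        .copied()
        .filter(|&id| {
            let ok = filter(id);
            if !ok {
                // count rejections as a side effect so nb_hits can be corrected
                filtered_count += 1;
            }
            ok
        })
        .collect();

    let nb_hits = docids.len() - filtered_count;
    assert_eq!(nb_hits, accepted.len());
    println!("accepted = {:?}, nb_hits = {}", accepted, nb_hits);
}
```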
@ -1,4 +1,4 @@
|
||||
use std::collections::{BTreeMap, BTreeSet, HashSet};
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::str::FromStr;
|
||||
use std::iter::IntoIterator;
|
||||
|
||||
@ -23,7 +23,7 @@ pub struct Settings {
|
||||
#[serde(default, deserialize_with = "deserialize_some")]
|
||||
pub searchable_attributes: Option<Option<Vec<String>>>,
|
||||
#[serde(default, deserialize_with = "deserialize_some")]
|
||||
pub displayed_attributes: Option<Option<HashSet<String>>>,
|
||||
pub displayed_attributes: Option<Option<BTreeSet<String>>>,
|
||||
#[serde(default, deserialize_with = "deserialize_some")]
|
||||
pub stop_words: Option<Option<BTreeSet<String>>>,
|
||||
#[serde(default, deserialize_with = "deserialize_some")]
|
||||
@ -161,7 +161,7 @@ pub struct SettingsUpdate {
|
||||
pub distinct_attribute: UpdateState<String>,
|
||||
pub primary_key: UpdateState<String>,
|
||||
pub searchable_attributes: UpdateState<Vec<String>>,
|
||||
pub displayed_attributes: UpdateState<HashSet<String>>,
|
||||
pub displayed_attributes: UpdateState<BTreeSet<String>>,
|
||||
pub stop_words: UpdateState<BTreeSet<String>>,
|
||||
pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
|
||||
pub attributes_for_faceting: UpdateState<Vec<String>>,
|
||||
|
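Swapping `HashSet` for `BTreeSet` in the settings is what makes `displayedAttributes` come back sorted and stable across runs (the "Sort displayedAttributes" changelog entry), since a `BTreeSet` always iterates in order. A tiny illustration of the difference:

```rust
use std::collections::{BTreeSet, HashSet};

fn main() {
    let attrs = ["title", "overview", "id", "release_date", "poster"];

    // HashSet iteration order is unspecified and can change between runs.
    let hashed: HashSet<&str> = attrs.iter().copied().collect();
    // BTreeSet always iterates in sorted (lexicographic) order.
    let sorted: BTreeSet<&str> = attrs.iter().copied().collect();

    println!("HashSet order (arbitrary): {:?}", hashed);
    println!("BTreeSet order (stable):   {:?}", sorted);

    assert_eq!(
        sorted.iter().copied().collect::<Vec<_>>(),
        vec!["id", "overview", "poster", "release_date", "title"]
    );
}
```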
@ -1,12 +1,14 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::mem;
|
||||
|
||||
use heed::{RwTxn, RoTxn, Result as ZResult, RoRange};
|
||||
use heed::{RwTxn, RoTxn, RoRange, types::Str, BytesEncode, BytesDecode};
|
||||
use sdset::{SetBuf, Set, SetOperation};
|
||||
|
||||
use meilisearch_types::DocumentId;
|
||||
use meilisearch_schema::FieldId;
|
||||
|
||||
use crate::MResult;
|
||||
use crate::database::MainT;
|
||||
use crate::facets::FacetKey;
|
||||
use super::cow_set::CowSet;
|
||||
@ -14,45 +16,82 @@ use super::cow_set::CowSet;
|
||||
/// contains facet info
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct Facets {
|
||||
pub(crate) facets: heed::Database<FacetKey, CowSet<DocumentId>>,
|
||||
pub(crate) facets: heed::Database<FacetKey, FacetData>,
|
||||
}
|
||||
|
||||
pub struct FacetData;
|
||||
|
||||
impl<'a> BytesEncode<'a> for FacetData {
|
||||
type EItem = (&'a str, &'a Set<DocumentId>);
|
||||
|
||||
fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<'a, [u8]>> {
|
||||
// get size of the first item
|
||||
let first_size = item.0.as_bytes().len();
|
||||
let size = mem::size_of::<u64>()
|
||||
+ first_size
|
||||
+ item.1.len() * mem::size_of::<DocumentId>();
|
||||
let mut buffer = Vec::with_capacity(size);
|
||||
// encode the length of the first item
|
||||
buffer.extend_from_slice(&first_size.to_be_bytes());
|
||||
buffer.extend_from_slice(Str::bytes_encode(&item.0)?.as_ref());
|
||||
let second_slice = CowSet::bytes_encode(&item.1)?;
|
||||
buffer.extend_from_slice(second_slice.as_ref());
|
||||
Some(Cow::Owned(buffer))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> BytesDecode<'a> for FacetData {
|
||||
type DItem = (&'a str, Cow<'a, Set<DocumentId>>);
|
||||
|
||||
fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
|
||||
const LEN: usize = mem::size_of::<u64>();
|
||||
let mut size_buf = [0; LEN];
|
||||
size_buf.copy_from_slice(bytes.get(0..LEN)?);
|
||||
// decode size of the first item from the bytes
|
||||
let first_size = usize::from_be_bytes(size_buf);
|
||||
// decode first and second items
|
||||
let first_item = Str::bytes_decode(bytes.get(LEN..(LEN + first_size))?)?;
|
||||
let second_item = CowSet::bytes_decode(bytes.get((LEN + first_size)..)?)?;
|
||||
Some((first_item, second_item))
|
||||
}
|
||||
}
|
||||
|
||||
impl Facets {
|
||||
// we use sdset::SetBuf to ensure the docids are sorted.
|
||||
pub fn put_facet_document_ids(&self, writer: &mut RwTxn<MainT>, facet_key: FacetKey, doc_ids: &Set<DocumentId>) -> ZResult<()> {
|
||||
self.facets.put(writer, &facet_key, doc_ids)
|
||||
pub fn put_facet_document_ids(&self, writer: &mut RwTxn<MainT>, facet_key: FacetKey, doc_ids: &Set<DocumentId>, facet_value: &str) -> MResult<()> {
|
||||
Ok(self.facets.put(writer, &facet_key, &(facet_value, doc_ids))?)
|
||||
}
|
||||
|
||||
pub fn field_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, field_id: FieldId) -> ZResult<RoRange<'txn, FacetKey, CowSet<DocumentId>>> {
|
||||
self.facets.prefix_iter(reader, &FacetKey::new(field_id, String::new()))
|
||||
pub fn field_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, field_id: FieldId) -> MResult<RoRange<'txn, FacetKey, FacetData>> {
|
||||
Ok(self.facets.prefix_iter(reader, &FacetKey::new(field_id, String::new()))?)
|
||||
}
|
||||
|
||||
pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> ZResult<Option<Cow<'txn, Set<DocumentId>>>> {
|
||||
self.facets.get(reader, &facet_key)
|
||||
pub fn facet_document_ids<'txn>(&self, reader: &'txn RoTxn<MainT>, facet_key: &FacetKey) -> MResult<Option<(&'txn str,Cow<'txn, Set<DocumentId>>)>> {
|
||||
Ok(self.facets.get(reader, &facet_key)?)
|
||||
}
|
||||
|
||||
/// updates the facets store, removing the documents from the facets provided in the
|
||||
/// `facet_map` argument
|
||||
pub fn remove(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
|
||||
for (key, document_ids) in facet_map {
|
||||
if let Some(old) = self.facets.get(writer, &key)? {
|
||||
pub fn remove(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, (String, Vec<DocumentId>)>) -> MResult<()> {
|
||||
for (key, (name, document_ids)) in facet_map {
|
||||
if let Some((_, old)) = self.facets.get(writer, &key)? {
|
||||
let to_remove = SetBuf::from_dirty(document_ids);
|
||||
let new = sdset::duo::OpBuilder::new(old.as_ref(), to_remove.as_set()).difference().into_set_buf();
|
||||
self.facets.put(writer, &key, new.as_set())?;
|
||||
self.facets.put(writer, &key, &(&name, new.as_set()))?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn add(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, Vec<DocumentId>>) -> ZResult<()> {
|
||||
for (key, document_ids) in facet_map {
|
||||
pub fn add(&self, writer: &mut RwTxn<MainT>, facet_map: HashMap<FacetKey, (String, Vec<DocumentId>)>) -> MResult<()> {
|
||||
for (key, (facet_name, document_ids)) in facet_map {
|
||||
let set = SetBuf::from_dirty(document_ids);
|
||||
self.put_facet_document_ids(writer, key, set.as_set())?;
|
||||
self.put_facet_document_ids(writer, key, set.as_set(), &facet_name)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
|
||||
self.facets.clear(writer)
|
||||
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
|
||||
Ok(self.facets.clear(writer)?)
|
||||
}
|
||||
}
|
||||
|
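`FacetData` packs the original facet string and the document-id set into one stored value by writing the string's byte length first, then the string, then the set. A standalone sketch of that length-prefix framing over plain bytes (no heed involved), showing how decoding recovers both halves:

```rust
use std::convert::TryInto;
use std::mem;

/// Encodes (facet_value, doc_ids) as: [len(value) as u64 BE][value bytes][each id as u64 BE].
fn encode(value: &str, doc_ids: &[u64]) -> Vec<u8> {
    let mut buf = Vec::with_capacity(mem::size_of::<u64>() + value.len() + doc_ids.len() * 8);
    buf.extend_from_slice(&(value.len() as u64).to_be_bytes());
    buf.extend_from_slice(value.as_bytes());
    for id in doc_ids {
        buf.extend_from_slice(&id.to_be_bytes());
    }
    buf
}

/// Decodes the pair back, returning None on malformed input.
fn decode(bytes: &[u8]) -> Option<(&str, Vec<u64>)> {
    const LEN: usize = mem::size_of::<u64>();
    let len = u64::from_be_bytes(bytes.get(..LEN)?.try_into().ok()?) as usize;
    let value = std::str::from_utf8(bytes.get(LEN..LEN + len)?).ok()?;
    let ids = bytes
        .get(LEN + len..)?
        .chunks_exact(8)
        .map(|c| u64::from_be_bytes(c.try_into().unwrap()))
        .collect();
    Some((value, ids))
}

fn main() {
    let encoded = encode("Sci-Fi", &[1, 5, 42]);
    let (value, ids) = decode(&encoded).expect("round trip");
    assert_eq!(value, "Sci-Fi");
    assert_eq!(ids, vec![1, 5, 42]);
    println!("decoded facet {:?} with {} document ids", value, ids.len());
}
```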
@ -1,5 +1,5 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str, CowSlice};
|
||||
@ -31,7 +31,7 @@ const SYNONYMS_KEY: &str = "synonyms";
|
||||
const UPDATED_AT_KEY: &str = "updated-at";
|
||||
const WORDS_KEY: &str = "words";
|
||||
|
||||
pub type FreqsMap = HashMap<String, usize>;
|
||||
pub type FreqsMap = BTreeMap<String, usize>;
|
||||
type SerdeFreqsMap = SerdeBincode<FreqsMap>;
|
||||
type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
|
||||
|
||||
|
@ -11,12 +11,14 @@ pub fn apply_clear_all(
|
||||
index.main.put_internal_docids(writer, &sdset::SetBuf::default())?;
|
||||
index.main.put_ranked_map(writer, &RankedMap::default())?;
|
||||
index.main.put_number_of_documents(writer, |_| 0)?;
|
||||
index.main.put_sorted_document_ids_cache(writer, &[])?;
|
||||
index.documents_fields.clear(writer)?;
|
||||
index.documents_fields_counts.clear(writer)?;
|
||||
index.postings_lists.clear(writer)?;
|
||||
index.docs_words.clear(writer)?;
|
||||
index.prefix_documents_cache.clear(writer)?;
|
||||
index.prefix_postings_lists_cache.clear(writer)?;
|
||||
index.facets.clear(writer)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1,8 +1,8 @@
|
||||
[package]
|
||||
name = "meilisearch-error"
|
||||
version = "0.13.0"
|
||||
version = "0.17.0"
|
||||
authors = ["marin <postma.marin@protonmail.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
actix-http = "1.0.1"
|
||||
actix-http = "2"
|
||||
|
@ -69,13 +69,15 @@ pub enum Code {
|
||||
DocumentNotFound,
|
||||
Internal,
|
||||
InvalidToken,
|
||||
Maintenance,
|
||||
MissingAuthorizationHeader,
|
||||
NotFound,
|
||||
PayloadTooLarge,
|
||||
RetrieveDocument,
|
||||
SearchDocuments,
|
||||
UnsupportedMediaType,
|
||||
|
||||
DumpAlreadyInProgress,
|
||||
DumpProcessFailed,
|
||||
}
|
||||
|
||||
impl Code {
|
||||
@ -115,13 +117,16 @@ impl Code {
|
||||
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
|
||||
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
|
||||
InvalidToken => ErrCode::authentication("invalid_token", StatusCode::FORBIDDEN),
|
||||
Maintenance => ErrCode::internal("maintenance", StatusCode::SERVICE_UNAVAILABLE),
|
||||
MissingAuthorizationHeader => ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED),
|
||||
NotFound => ErrCode::invalid("not_found", StatusCode::NOT_FOUND),
|
||||
PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
|
||||
RetrieveDocument => ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST),
|
||||
SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
|
||||
UnsupportedMediaType => ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE),
|
||||
|
||||
// error related to dump
|
||||
DumpAlreadyInProgress => ErrCode::invalid("dump_already_in_progress", StatusCode::CONFLICT),
|
||||
DumpProcessFailed => ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "meilisearch-http"
|
||||
description = "MeiliSearch HTTP server"
|
||||
version = "0.13.0"
|
||||
version = "0.17.0"
|
||||
license = "MIT"
|
||||
authors = [
|
||||
"Quentin de Quelen <quentin@dequelen.me>",
|
||||
@ -17,40 +17,43 @@ path = "src/main.rs"
|
||||
default = ["sentry"]
|
||||
|
||||
[dependencies]
|
||||
actix-cors = "0.2.0"
|
||||
actix-http = "1"
|
||||
actix-cors = "0.4.1"
|
||||
actix-http = "2"
|
||||
actix-rt = "1"
|
||||
actix-service = "1.0.5"
|
||||
actix-web = { version = "2.0.0", features = ["rustls"] }
|
||||
actix-web-macros = "0.1.0"
|
||||
actix-service = "1.0.6"
|
||||
actix-web = { version = "3.1.0", features = ["rustls"] }
|
||||
bytes = "0.5.4"
|
||||
chrono = { version = "0.4.11", features = ["serde"] }
|
||||
crossbeam-channel = "0.4.2"
|
||||
chrono = { version = "0.4.19", features = ["serde"] }
|
||||
crossbeam-channel = "0.5.0"
|
||||
env_logger = "0.7.1"
|
||||
futures = "0.3.4"
|
||||
http = "0.1.19"
|
||||
flate2 = "1.0.18"
|
||||
futures = "0.3.7"
|
||||
http = "0.2.1"
|
||||
indexmap = { version = "1.3.2", features = ["serde-1"] }
|
||||
log = "0.4.8"
|
||||
main_error = "0.1.0"
|
||||
meilisearch-core = { path = "../meilisearch-core", version = "0.13.0" }
|
||||
meilisearch-error = { path = "../meilisearch-error", version = "0.13.0" }
|
||||
meilisearch-schema = { path = "../meilisearch-schema", version = "0.13.0" }
|
||||
meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.13.0"}
|
||||
meilisearch-core = { path = "../meilisearch-core", version = "0.17.0" }
|
||||
meilisearch-error = { path = "../meilisearch-error", version = "0.17.0" }
|
||||
meilisearch-schema = { path = "../meilisearch-schema", version = "0.17.0" }
|
||||
meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.17.0"}
|
||||
mime = "0.3.16"
|
||||
once_cell = "1.4.1"
|
||||
rand = "0.7.3"
|
||||
regex = "1.3.6"
|
||||
rustls = "0.16.0"
|
||||
serde = { version = "1.0.105", features = ["derive"] }
|
||||
serde_json = { version = "1.0.50", features = ["preserve_order"] }
|
||||
serde_qs = "0.5.2"
|
||||
sha2 = "0.8.1"
|
||||
regex = "1.4.1"
|
||||
rustls = "0.18"
|
||||
serde = { version = "1.0.117", features = ["derive"] }
|
||||
serde_json = { version = "1.0.59", features = ["preserve_order"] }
|
||||
serde_qs = "0.7.0"
|
||||
sha2 = "0.9.1"
|
||||
siphasher = "0.3.2"
|
||||
slice-group-by = "0.2.6"
|
||||
structopt = "0.3.12"
|
||||
structopt = "0.3.20"
|
||||
tar = "0.4.29"
|
||||
tempfile = "3.1.0"
|
||||
tokio = { version = "0.2.18", features = ["macros"] }
|
||||
ureq = { version = "0.12.0", features = ["tls"], default-features = false }
|
||||
ureq = { version = "1.5.1", features = ["tls"], default-features = false }
|
||||
walkdir = "2.3.1"
|
||||
whoami = "0.8.1"
|
||||
whoami = "0.9.0"
|
||||
|
||||
[dependencies.sentry]
|
||||
version = "0.18.1"
|
||||
|
@ -136,13 +136,13 @@
|
||||
<div class="level-item has-text-centered">
|
||||
<div>
|
||||
<p class="heading">Documents</p>
|
||||
<p id="count" class="title">25</p>
|
||||
<p id="count" class="title">0</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="level-item has-text-centered">
|
||||
<div>
|
||||
<p class="heading">Time Spent</p>
|
||||
<p id="time" class="title">4ms</p>
|
||||
<p id="time" class="title">N/A</p>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
@ -203,7 +203,7 @@
|
||||
if (e.selectedIndex == -1) { return }
|
||||
var index = e.options[e.selectedIndex].value;
|
||||
|
||||
let theUrl = `${baseUrl}/indexes/${index}/search?q=${search.value}&attributesToHighlight=*`;
|
||||
let theUrl = `${baseUrl}/indexes/${index}/search?q=${encodeURIComponent(search.value)}&attributesToHighlight=*`;
|
||||
|
||||
if (lastRequest) { lastRequest.abort() }
|
||||
lastRequest = new XMLHttpRequest();
|
||||
@ -221,7 +221,7 @@
|
||||
results.innerHTML = '';
|
||||
|
||||
let processingTimeMs = httpResults.processingTimeMs;
|
||||
let numberOfDocuments = httpResults.hits.length;
|
||||
let numberOfDocuments = httpResults.nbHits;
|
||||
time.innerHTML = `${processingTimeMs}ms`;
|
||||
count.innerHTML = `${numberOfDocuments}`;
|
||||
|
||||
@ -299,6 +299,8 @@
|
||||
refreshIndexList();
|
||||
|
||||
search.oninput = triggerSearch;
|
||||
|
||||
let select = document.getElementById("index");
|
||||
select.onchange = triggerSearch;
|
||||
|
||||
triggerSearch();
|
||||
|
@ -1,10 +1,12 @@
|
||||
use std::error::Error;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use meilisearch_core::{Database, DatabaseOptions};
|
||||
use meilisearch_core::{Database, DatabaseOptions, Index};
|
||||
use sha2::Digest;
|
||||
|
||||
use crate::error::{Error as MSError, ResponseError};
|
||||
use crate::index_update_callback;
|
||||
use crate::option::Opt;
|
||||
|
||||
@ -25,6 +27,8 @@ impl Deref for Data {
|
||||
pub struct DataInner {
|
||||
pub db: Arc<Database>,
|
||||
pub db_path: String,
|
||||
pub dumps_dir: PathBuf,
|
||||
pub dump_batch_size: usize,
|
||||
pub api_keys: ApiKeys,
|
||||
pub server_pid: u32,
|
||||
pub http_payload_size_limit: usize,
|
||||
@ -57,11 +61,13 @@ impl ApiKeys {
|
||||
impl Data {
|
||||
pub fn new(opt: Opt) -> Result<Data, Box<dyn Error>> {
|
||||
let db_path = opt.db_path.clone();
|
||||
let dumps_dir = opt.dumps_dir.clone();
|
||||
let dump_batch_size = opt.dump_batch_size;
|
||||
let server_pid = std::process::id();
|
||||
|
||||
let db_opt = DatabaseOptions {
|
||||
main_map_size: opt.main_map_size,
|
||||
update_map_size: opt.update_map_size,
|
||||
main_map_size: opt.max_mdb_size,
|
||||
update_map_size: opt.max_udb_size,
|
||||
};
|
||||
|
||||
let http_payload_size_limit = opt.http_payload_size_limit;
|
||||
@ -79,6 +85,8 @@ impl Data {
|
||||
let inner_data = DataInner {
|
||||
db: db.clone(),
|
||||
db_path,
|
||||
dumps_dir,
|
||||
dump_batch_size,
|
||||
api_keys,
|
||||
server_pid,
|
||||
http_payload_size_limit,
|
||||
@ -95,4 +103,60 @@ impl Data {
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn create_index(&self, uid: &str) -> Result<Index, ResponseError> {
|
||||
if !uid
|
||||
.chars()
|
||||
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|
||||
{
|
||||
return Err(MSError::InvalidIndexUid.into());
|
||||
}
|
||||
|
||||
let created_index = self.db.create_index(&uid).map_err(|e| match e {
|
||||
meilisearch_core::Error::IndexAlreadyExists => e.into(),
|
||||
_ => ResponseError::from(MSError::create_index(e)),
|
||||
})?;
|
||||
|
||||
self.db.main_write::<_, _, ResponseError>(|mut writer| {
|
||||
created_index.main.put_name(&mut writer, uid)?;
|
||||
|
||||
created_index
|
||||
.main
|
||||
.created_at(&writer)?
|
||||
.ok_or(MSError::internal("Impossible to read created at"))?;
|
||||
|
||||
created_index
|
||||
.main
|
||||
.updated_at(&writer)?
|
||||
.ok_or(MSError::internal("Impossible to read updated at"))?;
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
Ok(created_index)
|
||||
}
|
||||
|
||||
pub fn get_or_create_index<F, R>(&self, uid: &str, f: F) -> Result<R, ResponseError>
|
||||
where
|
||||
F: FnOnce(&Index) -> Result<R, ResponseError>,
|
||||
{
|
||||
let mut index_has_been_created = false;
|
||||
|
||||
let index = match self.db.open_index(&uid) {
|
||||
Some(index) => index,
|
||||
None => {
|
||||
index_has_been_created = true;
|
||||
self.create_index(&uid)?
|
||||
}
|
||||
};
|
||||
|
||||
match f(&index) {
|
||||
Ok(r) => Ok(r),
|
||||
Err(err) => {
|
||||
if index_has_been_created {
|
||||
let _ = self.db.delete_index(&uid);
|
||||
}
|
||||
Err(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
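A minimal usage sketch (the handler below is hypothetical and not part of this changeset): get_or_create_index lets a write route create the index lazily and rolls the creation back when the closure fails, so a failed first update does not leave an empty index behind.

// Hypothetical caller, illustrating the rollback contract of get_or_create_index:
// if the closure errors and the index was created by this very call, it is deleted again.
fn update_settings_example(data: &Data, uid: &str) -> Result<(), ResponseError> {
    data.get_or_create_index(uid, |index| {
        // ... perform the write that motivated the lazy creation ...
        let _ = index; // placeholder for the real update
        Ok(())
    })
}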
425
meilisearch-http/src/dump.rs
Normal file
@ -0,0 +1,425 @@
|
||||
use std::fs::{create_dir_all, File};
|
||||
use std::io::prelude::*;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
use std::thread;
|
||||
|
||||
use actix_web::web;
|
||||
use chrono::offset::Utc;
|
||||
use indexmap::IndexMap;
|
||||
use log::{error, info};
|
||||
use meilisearch_core::{MainWriter, MainReader, UpdateReader};
|
||||
use meilisearch_core::settings::Settings;
|
||||
use meilisearch_core::update::{apply_settings_update, apply_documents_addition};
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::Data;
|
||||
use crate::error::{Error, ResponseError};
|
||||
use crate::helpers::compression;
|
||||
use crate::routes::index;
|
||||
use crate::routes::index::IndexResponse;
|
||||
|
||||
// Mutex to share dump progress.
|
||||
static DUMP_INFO: Lazy<Mutex<Option<DumpInfo>>> = Lazy::new(Mutex::default);
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
|
||||
enum DumpVersion {
|
||||
V1,
|
||||
}
|
||||
|
||||
impl DumpVersion {
|
||||
const CURRENT: Self = Self::V1;
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DumpMetadata {
|
||||
indexes: Vec<crate::routes::index::IndexResponse>,
|
||||
db_version: String,
|
||||
dump_version: DumpVersion,
|
||||
}
|
||||
|
||||
impl DumpMetadata {
|
||||
/// Create a DumpMetadata with the current dump version of meilisearch.
|
||||
pub fn new(indexes: Vec<crate::routes::index::IndexResponse>, db_version: String) -> Self {
|
||||
DumpMetadata {
|
||||
indexes,
|
||||
db_version,
|
||||
dump_version: DumpVersion::CURRENT,
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract DumpMetadata from `metadata.json` file present at provided `dir_path`
|
||||
fn from_path(dir_path: &Path) -> Result<Self, Error> {
|
||||
let path = dir_path.join("metadata.json");
|
||||
let file = File::open(path)?;
|
||||
let reader = std::io::BufReader::new(file);
|
||||
let metadata = serde_json::from_reader(reader)?;
|
||||
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
/// Write DumpMetadata in `metadata.json` file at provided `dir_path`
|
||||
fn to_path(&self, dir_path: &Path) -> Result<(), Error> {
|
||||
let path = dir_path.join("metadata.json");
|
||||
let file = File::create(path)?;
|
||||
|
||||
serde_json::to_writer(file, &self)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract Settings from `settings.json` file present at provided `dir_path`
|
||||
fn settings_from_path(dir_path: &Path) -> Result<Settings, Error> {
|
||||
let path = dir_path.join("settings.json");
|
||||
let file = File::open(path)?;
|
||||
let reader = std::io::BufReader::new(file);
|
||||
let metadata = serde_json::from_reader(reader)?;
|
||||
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
/// Write Settings in `settings.json` file at provided `dir_path`
|
||||
fn settings_to_path(settings: &Settings, dir_path: &Path) -> Result<(), Error> {
|
||||
let path = dir_path.join("settings.json");
|
||||
let file = File::create(path)?;
|
||||
|
||||
serde_json::to_writer(file, settings)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Import settings and documents of a dump with version `DumpVersion::V1` in specified index.
|
||||
fn import_index_v1(
|
||||
data: &Data,
|
||||
dumps_dir: &Path,
|
||||
index_uid: &str,
|
||||
document_batch_size: usize,
|
||||
write_txn: &mut MainWriter,
|
||||
) -> Result<(), Error> {
|
||||
|
||||
// open index
|
||||
let index = data
|
||||
.db
|
||||
.open_index(index_uid)
|
||||
.ok_or(Error::index_not_found(index_uid))?;
|
||||
|
||||
// index dir path in dump dir
|
||||
let index_path = &dumps_dir.join(index_uid);
|
||||
|
||||
// extract `settings.json` file and import content
|
||||
let settings = settings_from_path(&index_path)?;
|
||||
let settings = settings.to_update().map_err(|e| Error::dump_failed(format!("importing settings for index {}; {}", index_uid, e)))?;
|
||||
apply_settings_update(write_txn, &index, settings)?;
|
||||
|
||||
// create iterator over documents in `documents.jsonl` to make batch importation
let documents = {
|
||||
let file = File::open(&index_path.join("documents.jsonl"))?;
|
||||
let reader = std::io::BufReader::new(file);
|
||||
let deserializer = serde_json::Deserializer::from_reader(reader);
|
||||
deserializer.into_iter::<IndexMap<String, serde_json::Value>>()
|
||||
};
|
||||
|
||||
// batch import document every `document_batch_size`:
|
||||
// create a Vec to bufferize documents
|
||||
let mut values = Vec::with_capacity(document_batch_size);
|
||||
// iterate over documents
|
||||
for document in documents {
|
||||
// push document in buffer
|
||||
values.push(document?);
|
||||
// if buffer is full, create and apply a batch, and clean buffer
|
||||
if values.len() == document_batch_size {
|
||||
let batch = std::mem::replace(&mut values, Vec::with_capacity(document_batch_size));
|
||||
apply_documents_addition(write_txn, &index, batch)?;
|
||||
}
|
||||
}
|
||||
|
||||
// apply documents remaining in the buffer
|
||||
if !values.is_empty() {
|
||||
apply_documents_addition(write_txn, &index, values)?;
|
||||
}
|
||||
|
||||
// sync index information: stats, updated_at, last_update
|
||||
if let Err(e) = crate::index_update_callback_txn(index, index_uid, data, write_txn) {
|
||||
return Err(Error::Internal(e));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Import dump from `dump_path` in database.
|
||||
pub fn import_dump(
|
||||
data: &Data,
|
||||
dump_path: &Path,
|
||||
document_batch_size: usize,
|
||||
) -> Result<(), Error> {
|
||||
info!("Importing dump from {:?}...", dump_path);
|
||||
|
||||
// create a temporary directory
|
||||
let tmp_dir = TempDir::new()?;
|
||||
let tmp_dir_path = tmp_dir.path();
|
||||
|
||||
// extract dump in temporary directory
|
||||
compression::from_tar_gz(dump_path, tmp_dir_path)?;
|
||||
|
||||
// read dump metadata
|
||||
let metadata = DumpMetadata::from_path(&tmp_dir_path)?;
|
||||
|
||||
// choose importation function from DumpVersion of metadata
|
||||
let import_index = match metadata.dump_version {
|
||||
DumpVersion::V1 => import_index_v1,
|
||||
};
|
||||
|
||||
// remove indexes which have same `uid` than indexes to import and create empty indexes
|
||||
let existing_index_uids = data.db.indexes_uids();
|
||||
for index in metadata.indexes.iter() {
|
||||
if existing_index_uids.contains(&index.uid) {
|
||||
data.db.delete_index(index.uid.clone())?;
|
||||
}
|
||||
index::create_index_sync(&data.db, index.uid.clone(), index.name.clone(), index.primary_key.clone())?;
|
||||
}
|
||||
|
||||
// import each indexes content
|
||||
data.db.main_write::<_, _, Error>(|mut writer| {
|
||||
for index in metadata.indexes {
|
||||
import_index(&data, tmp_dir_path, &index.uid, document_batch_size, &mut writer)?;
|
||||
}
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
info!("Dump importation from {:?} succeed", dump_path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum DumpStatus {
|
||||
Done,
|
||||
InProgress,
|
||||
Failed,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DumpInfo {
|
||||
pub uid: String,
|
||||
pub status: DumpStatus,
|
||||
#[serde(skip_serializing_if = "Option::is_none", flatten)]
|
||||
pub error: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
impl DumpInfo {
|
||||
pub fn new(uid: String, status: DumpStatus) -> Self {
|
||||
Self { uid, status, error: None }
|
||||
}
|
||||
|
||||
pub fn with_error(mut self, error: ResponseError) -> Self {
|
||||
self.status = DumpStatus::Failed;
|
||||
self.error = Some(json!(error));
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
pub fn dump_already_in_progress(&self) -> bool {
|
||||
self.status == DumpStatus::InProgress
|
||||
}
|
||||
|
||||
pub fn get_current() -> Option<Self> {
|
||||
DUMP_INFO.lock().unwrap().clone()
|
||||
}
|
||||
|
||||
pub fn set_current(&self) {
|
||||
*DUMP_INFO.lock().unwrap() = Some(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate uid from creation date
|
||||
fn generate_uid() -> String {
|
||||
Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
|
||||
}
|
||||
|
||||
/// Infer dumps_dir from dump_uid
|
||||
pub fn compressed_dumps_dir(dumps_dir: &Path, dump_uid: &str) -> PathBuf {
|
||||
dumps_dir.join(format!("{}.dump", dump_uid))
|
||||
}
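For illustration (the values below are made up, not taken from the diff): the uid is simply a UTC timestamp, and the archive path is derived from it.

// Illustrative sketch of the two helpers above; the timestamp shown is invented.
let uid = generate_uid();                                  // e.g. "20201015-120000123"
let path = compressed_dumps_dir(Path::new("dumps"), &uid); // -> dumps/20201015-120000123.dump
assert_eq!(path, Path::new("dumps").join(format!("{}.dump", uid)));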
|
||||
|
||||
/// Write metadata in dump
|
||||
fn dump_metadata(data: &web::Data<Data>, dir_path: &Path, indexes: Vec<IndexResponse>) -> Result<(), Error> {
|
||||
let (db_major, db_minor, db_patch) = data.db.version();
|
||||
let metadata = DumpMetadata::new(indexes, format!("{}.{}.{}", db_major, db_minor, db_patch));
|
||||
|
||||
metadata.to_path(dir_path)
|
||||
}
|
||||
|
||||
/// Export settings of provided index in dump
|
||||
fn dump_index_settings(data: &web::Data<Data>, reader: &MainReader, dir_path: &Path, index_uid: &str) -> Result<(), Error> {
|
||||
let settings = crate::routes::setting::get_all_sync(data, reader, index_uid)?;
|
||||
|
||||
settings_to_path(&settings, dir_path)
|
||||
}
|
||||
|
||||
/// Export updates of provided index in dump
|
||||
fn dump_index_updates(data: &web::Data<Data>, reader: &UpdateReader, dir_path: &Path, index_uid: &str) -> Result<(), Error> {
|
||||
let updates_path = dir_path.join("updates.jsonl");
|
||||
let updates = crate::routes::index::get_all_updates_status_sync(data, reader, index_uid)?;
|
||||
|
||||
let file = File::create(updates_path)?;
|
||||
|
||||
for update in updates {
|
||||
serde_json::to_writer(&file, &update)?;
|
||||
writeln!(&file)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Export documents of provided index in dump
|
||||
fn dump_index_documents(data: &web::Data<Data>, reader: &MainReader, dir_path: &Path, index_uid: &str) -> Result<(), Error> {
|
||||
let documents_path = dir_path.join("documents.jsonl");
|
||||
let file = File::create(documents_path)?;
|
||||
let dump_batch_size = data.dump_batch_size;
|
||||
|
||||
let mut offset = 0;
|
||||
loop {
|
||||
let documents = crate::routes::document::get_all_documents_sync(data, reader, index_uid, offset, dump_batch_size, None)?;
|
||||
if documents.is_empty() { break; } else { offset += dump_batch_size; }
|
||||
|
||||
for document in documents {
|
||||
serde_json::to_writer(&file, &document)?;
|
||||
writeln!(&file)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Write error with a context.
|
||||
fn fail_dump_process<E: std::error::Error>(dump_info: DumpInfo, context: &str, error: E) {
|
||||
let error_message = format!("{}; {}", context, error);
|
||||
|
||||
error!("Something went wrong during dump process: {}", &error_message);
|
||||
dump_info.with_error(Error::dump_failed(error_message).into()).set_current();
|
||||
}
|
||||
|
||||
/// Main function of dump.
|
||||
fn dump_process(data: web::Data<Data>, dumps_dir: PathBuf, dump_info: DumpInfo) {
|
||||
// open read transaction on Update
|
||||
let update_reader = match data.db.update_read_txn() {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
fail_dump_process(dump_info, "creating RO transaction on updates", e);
|
||||
return ;
|
||||
}
|
||||
};
|
||||
|
||||
// open read transaction on Main
|
||||
let main_reader = match data.db.main_read_txn() {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
fail_dump_process(dump_info, "creating RO transaction on main", e);
|
||||
return ;
|
||||
}
|
||||
};
|
||||
|
||||
// create a temporary directory
|
||||
let tmp_dir = match TempDir::new() {
|
||||
Ok(tmp_dir) => tmp_dir,
|
||||
Err(e) => {
|
||||
fail_dump_process(dump_info, "creating temporary directory", e);
|
||||
return ;
|
||||
}
|
||||
};
|
||||
let tmp_dir_path = tmp_dir.path();
|
||||
|
||||
// fetch indexes
|
||||
let indexes = match crate::routes::index::list_indexes_sync(&data, &main_reader) {
|
||||
Ok(indexes) => indexes,
|
||||
Err(e) => {
|
||||
fail_dump_process(dump_info, "listing indexes", e);
|
||||
return ;
|
||||
}
|
||||
};
|
||||
|
||||
// create metadata
|
||||
if let Err(e) = dump_metadata(&data, &tmp_dir_path, indexes.clone()) {
|
||||
fail_dump_process(dump_info, "generating metadata", e);
|
||||
return ;
|
||||
}
|
||||
|
||||
// export settings, updates and documents for each indexes
|
||||
for index in indexes {
|
||||
let index_path = tmp_dir_path.join(&index.uid);
|
||||
|
||||
// create index sub-directory
|
||||
if let Err(e) = create_dir_all(&index_path) {
|
||||
fail_dump_process(dump_info, &format!("creating directory for index {}", &index.uid), e);
|
||||
return ;
|
||||
}
|
||||
|
||||
// export settings
|
||||
if let Err(e) = dump_index_settings(&data, &main_reader, &index_path, &index.uid) {
|
||||
fail_dump_process(dump_info, &format!("generating settings for index {}", &index.uid), e);
|
||||
return ;
|
||||
}
|
||||
|
||||
// export documents
|
||||
if let Err(e) = dump_index_documents(&data, &main_reader, &index_path, &index.uid) {
|
||||
fail_dump_process(dump_info, &format!("generating documents for index {}", &index.uid), e);
|
||||
return ;
|
||||
}
|
||||
|
||||
// export updates
|
||||
if let Err(e) = dump_index_updates(&data, &update_reader, &index_path, &index.uid) {
|
||||
fail_dump_process(dump_info, &format!("generating updates for index {}", &index.uid), e);
|
||||
return ;
|
||||
}
|
||||
}
|
||||
|
||||
// compress dump in a file named `{dump_uid}.dump` in `dumps_dir`
|
||||
if let Err(e) = crate::helpers::compression::to_tar_gz(&tmp_dir_path, &compressed_dumps_dir(&dumps_dir, &dump_info.uid)) {
|
||||
fail_dump_process(dump_info, "compressing dump", e);
|
||||
return ;
|
||||
}
|
||||
|
||||
// update dump info to `done`
|
||||
let resume = DumpInfo::new(
|
||||
dump_info.uid,
|
||||
DumpStatus::Done
|
||||
);
|
||||
|
||||
resume.set_current();
|
||||
}
|
||||
|
||||
pub fn init_dump_process(data: &web::Data<Data>, dumps_dir: &Path) -> Result<DumpInfo, Error> {
|
||||
create_dir_all(dumps_dir).map_err(|e| Error::dump_failed(format!("creating temporary directory {}", e)))?;
|
||||
|
||||
// check if a dump is already in progress
|
||||
if let Some(resume) = DumpInfo::get_current() {
|
||||
if resume.dump_already_in_progress() {
|
||||
return Err(Error::dump_conflict())
|
||||
}
|
||||
}
|
||||
|
||||
// generate a new dump info
|
||||
let info = DumpInfo::new(
|
||||
generate_uid(),
|
||||
DumpStatus::InProgress
|
||||
);
|
||||
|
||||
info.set_current();
|
||||
|
||||
let data = data.clone();
|
||||
let dumps_dir = dumps_dir.to_path_buf();
|
||||
let info_cloned = info.clone();
|
||||
// run dump process in a new thread
|
||||
thread::spawn(move ||
|
||||
dump_process(data, dumps_dir, info_cloned)
|
||||
);
|
||||
|
||||
Ok(info)
|
||||
}
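A hedged sketch of the dump lifecycle seen from a caller (the trigger_example function is hypothetical; the real HTTP handlers live in routes/dump.rs): init_dump_process registers an InProgress DumpInfo, spawns dump_process on a background thread, and progress can be polled at any time through the shared DUMP_INFO mutex.

// Hypothetical caller, assuming only items defined in this file plus actix_web::web.
fn trigger_example(data: &web::Data<Data>) -> Result<(), Error> {
    let info = init_dump_process(data, Path::new("dumps"))?; // status is InProgress here
    if let Some(current) = DumpInfo::get_current() {
        // the freshly registered dump is what get_current() reports
        assert_eq!(current.uid, info.uid);
    }
    Ok(())
}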
|
@ -5,7 +5,7 @@ use actix_http::ResponseBuilder;
|
||||
use actix_web as aweb;
|
||||
use actix_web::error::{JsonPayloadError, QueryPayloadError};
|
||||
use actix_web::http::StatusCode;
|
||||
use serde_json::json;
|
||||
use serde::ser::{Serialize, Serializer, SerializeStruct};
|
||||
|
||||
use meilisearch_error::{ErrorCode, Code};
|
||||
|
||||
@ -34,6 +34,51 @@ impl From<Error> for ResponseError {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<meilisearch_core::Error> for ResponseError {
|
||||
fn from(err: meilisearch_core::Error) -> ResponseError {
|
||||
ResponseError { inner: Box::new(err) }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<meilisearch_schema::Error> for ResponseError {
|
||||
fn from(err: meilisearch_schema::Error) -> ResponseError {
|
||||
ResponseError { inner: Box::new(err) }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FacetCountError> for ResponseError {
|
||||
fn from(err: FacetCountError) -> ResponseError {
|
||||
ResponseError { inner: Box::new(err) }
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for ResponseError {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let struct_name = "ResponseError";
|
||||
let field_count = 4;
|
||||
|
||||
let mut state = serializer.serialize_struct(struct_name, field_count)?;
|
||||
state.serialize_field("message", &self.to_string())?;
|
||||
state.serialize_field("errorCode", &self.error_name())?;
|
||||
state.serialize_field("errorType", &self.error_type())?;
|
||||
state.serialize_field("errorLink", &self.error_url())?;
|
||||
state.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl aweb::error::ResponseError for ResponseError {
|
||||
fn error_response(&self) -> aweb::HttpResponse {
|
||||
ResponseBuilder::new(self.status_code()).json(&self)
|
||||
}
|
||||
|
||||
fn status_code(&self) -> StatusCode {
|
||||
self.http_status()
|
||||
}
|
||||
}
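Illustrative only: with the Serialize impl above, an error such as a missing index would produce a body along these lines (the exact code, type and URL come from meilisearch_error::Code and are assumptions here, not taken from this diff):

// {
//   "message": "Index movies not found",
//   "errorCode": "index_not_found",
//   "errorType": "invalid_request_error",
//   "errorLink": "https://docs.meilisearch.com/errors#index_not_found"
// }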
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
BadParameter(String, String),
|
||||
@ -41,10 +86,10 @@ pub enum Error {
|
||||
CreateIndex(String),
|
||||
DocumentNotFound(String),
|
||||
IndexNotFound(String),
|
||||
IndexAlreadyExists(String),
|
||||
Internal(String),
|
||||
InvalidIndexUid,
|
||||
InvalidToken(String),
|
||||
Maintenance,
|
||||
MissingAuthorizationHeader,
|
||||
NotFound(String),
|
||||
OpenIndex(String),
|
||||
@ -52,6 +97,8 @@ pub enum Error {
|
||||
SearchDocuments(String),
|
||||
PayloadTooLarge,
|
||||
UnsupportedMediaType,
|
||||
DumpAlreadyInProgress,
|
||||
DumpProcessFailed(String),
|
||||
}
|
||||
|
||||
impl error::Error for Error {}
|
||||
@ -65,10 +112,10 @@ impl ErrorCode for Error {
|
||||
CreateIndex(_) => Code::CreateIndex,
|
||||
DocumentNotFound(_) => Code::DocumentNotFound,
|
||||
IndexNotFound(_) => Code::IndexNotFound,
|
||||
IndexAlreadyExists(_) => Code::IndexAlreadyExists,
|
||||
Internal(_) => Code::Internal,
|
||||
InvalidIndexUid => Code::InvalidIndexUid,
|
||||
InvalidToken(_) => Code::InvalidToken,
|
||||
Maintenance => Code::Maintenance,
|
||||
MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
|
||||
NotFound(_) => Code::NotFound,
|
||||
OpenIndex(_) => Code::OpenIndex,
|
||||
@ -76,6 +123,8 @@ impl ErrorCode for Error {
|
||||
SearchDocuments(_) => Code::SearchDocuments,
|
||||
PayloadTooLarge => Code::PayloadTooLarge,
|
||||
UnsupportedMediaType => Code::UnsupportedMediaType,
|
||||
DumpAlreadyInProgress => Code::DumpAlreadyInProgress,
|
||||
DumpProcessFailed(_) => Code::DumpProcessFailed,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -114,10 +163,10 @@ impl fmt::Display for FacetCountError {
|
||||
use FacetCountError::*;
|
||||
|
||||
match self {
|
||||
AttributeNotSet(attr) => write!(f, "attribute {} is not set as facet", attr),
|
||||
SyntaxError(msg) => write!(f, "syntax error: {}", msg),
|
||||
UnexpectedToken { expected, found } => write!(f, "unexpected {} found, expected {:?}", found, expected),
|
||||
NoFacetSet => write!(f, "can't perform facet count, as no facet is set"),
|
||||
AttributeNotSet(attr) => write!(f, "Attribute {} is not set as facet", attr),
|
||||
SyntaxError(msg) => write!(f, "Syntax error: {}", msg),
|
||||
UnexpectedToken { expected, found } => write!(f, "Unexpected {} found, expected {:?}", found, expected),
|
||||
NoFacetSet => write!(f, "Can't perform facet count, as no facet is set"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -167,10 +216,6 @@ impl Error {
|
||||
Error::InvalidIndexUid
|
||||
}
|
||||
|
||||
pub fn maintenance() -> Error {
|
||||
Error::Maintenance
|
||||
}
|
||||
|
||||
pub fn retrieve_document(doc_id: u32, err: impl fmt::Display) -> Error {
|
||||
Error::RetrieveDocument(doc_id, err.to_string())
|
||||
}
|
||||
@ -178,6 +223,14 @@ impl Error {
|
||||
pub fn search_documents(err: impl fmt::Display) -> Error {
|
||||
Error::SearchDocuments(err.to_string())
|
||||
}
|
||||
|
||||
pub fn dump_conflict() -> Error {
|
||||
Error::DumpAlreadyInProgress
|
||||
}
|
||||
|
||||
pub fn dump_failed(message: String) -> Error {
|
||||
Error::DumpProcessFailed(message)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
@ -188,45 +241,26 @@ impl fmt::Display for Error {
|
||||
Self::CreateIndex(err) => write!(f, "Impossible to create index; {}", err),
|
||||
Self::DocumentNotFound(document_id) => write!(f, "Document with id {} not found", document_id),
|
||||
Self::IndexNotFound(index_uid) => write!(f, "Index {} not found", index_uid),
|
||||
Self::IndexAlreadyExists(index_uid) => write!(f, "Index {} already exists", index_uid),
|
||||
Self::Internal(err) => f.write_str(err),
|
||||
Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."),
|
||||
Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err),
|
||||
Self::Maintenance => f.write_str("Server is in maintenance, please try again later"),
|
||||
Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"),
|
||||
Self::NotFound(err) => write!(f, "{} not found", err),
|
||||
Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
|
||||
Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
|
||||
Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
|
||||
Self::PayloadTooLarge => f.write_str("Payload to large"),
|
||||
Self::RetrieveDocument(id, err) => write!(f, "Impossible to retrieve the document with id: {}; {}", id, err),
|
||||
Self::SearchDocuments(err) => write!(f, "Impossible to search documents; {}", err),
|
||||
Self::PayloadTooLarge => f.write_str("Payload too large"),
|
||||
Self::UnsupportedMediaType => f.write_str("Unsupported media type"),
|
||||
Self::DumpAlreadyInProgress => f.write_str("Another dump is already in progress"),
|
||||
Self::DumpProcessFailed(message) => write!(f, "Dump process failed: {}", message),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl aweb::error::ResponseError for ResponseError {
|
||||
fn error_response(&self) -> aweb::HttpResponse {
|
||||
ResponseBuilder::new(self.status_code()).json(json!({
|
||||
"message": self.to_string(),
|
||||
"errorCode": self.error_name(),
|
||||
"errorType": self.error_type(),
|
||||
"errorLink": self.error_url(),
|
||||
}))
|
||||
}
|
||||
|
||||
fn status_code(&self) -> StatusCode {
|
||||
self.http_status()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<meilisearch_core::Error> for ResponseError {
|
||||
fn from(err: meilisearch_core::Error) -> ResponseError {
|
||||
ResponseError { inner: Box::new(err) }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<meilisearch_schema::Error> for ResponseError {
|
||||
fn from(err: meilisearch_schema::Error) -> ResponseError {
|
||||
ResponseError { inner: Box::new(err) }
|
||||
impl From<std::io::Error> for Error {
|
||||
fn from(err: std::io::Error) -> Error {
|
||||
Error::Internal(err.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@ -236,9 +270,15 @@ impl From<actix_http::Error> for Error {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FacetCountError> for ResponseError {
|
||||
fn from(err: FacetCountError) -> ResponseError {
|
||||
ResponseError { inner: Box::new(err) }
|
||||
impl From<meilisearch_core::Error> for Error {
|
||||
fn from(err: meilisearch_core::Error) -> Error {
|
||||
Error::Internal(err.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<serde_json::error::Error> for Error {
|
||||
fn from(err: serde_json::error::Error) -> Error {
|
||||
Error::Internal(err.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4,7 +4,7 @@ use std::rc::Rc;
|
||||
use std::task::{Context, Poll};
|
||||
|
||||
use actix_service::{Service, Transform};
|
||||
use actix_web::{dev::ServiceRequest, dev::ServiceResponse};
|
||||
use actix_web::{dev::ServiceRequest, dev::ServiceResponse, web};
|
||||
use futures::future::{err, ok, Future, Ready};
|
||||
|
||||
use crate::error::{Error, ResponseError};
|
||||
@ -63,7 +63,7 @@ where
|
||||
let mut svc = self.service.clone();
|
||||
// This unwrap is left because this error should never appear. If that's the case, then
|
||||
// it means that actix-web has an issue or someone changes the type `Data`.
|
||||
let data = req.app_data::<Data>().unwrap();
|
||||
let data = req.app_data::<web::Data<Data>>().unwrap();
|
||||
|
||||
if data.api_keys.master.is_none() {
|
||||
return Box::pin(svc.call(req));
|
||||
|
27
meilisearch-http/src/helpers/compression.rs
Normal file
@ -0,0 +1,27 @@
|
||||
use flate2::Compression;
|
||||
use flate2::read::GzDecoder;
|
||||
use flate2::write::GzEncoder;
|
||||
use std::fs::{create_dir_all, File};
|
||||
use std::path::Path;
|
||||
use tar::{Builder, Archive};
|
||||
|
||||
use crate::error::Error;
|
||||
|
||||
pub fn to_tar_gz(src: &Path, dest: &Path) -> Result<(), Error> {
|
||||
let f = File::create(dest)?;
|
||||
let gz_encoder = GzEncoder::new(f, Compression::default());
|
||||
let mut tar_encoder = Builder::new(gz_encoder);
|
||||
tar_encoder.append_dir_all(".", src)?;
|
||||
let gz_encoder = tar_encoder.into_inner()?;
|
||||
gz_encoder.finish()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn from_tar_gz(src: &Path, dest: &Path) -> Result<(), Error> {
|
||||
let f = File::open(src)?;
|
||||
let gz = GzDecoder::new(f);
|
||||
let mut ar = Archive::new(gz);
|
||||
create_dir_all(dest)?;
|
||||
ar.unpack(dest)?;
|
||||
Ok(())
|
||||
}
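A minimal round-trip sketch of the two helpers above, under made-up paths; it assumes only the Error type already imported in this file.

// Pack a directory into a .dump archive, then unpack it again somewhere else.
fn roundtrip_example() -> Result<(), Error> {
    to_tar_gz(Path::new("/tmp/my-dump-dir"), Path::new("/tmp/out.dump"))?;
    from_tar_gz(Path::new("/tmp/out.dump"), Path::new("/tmp/restored"))?; // creates dest if needed
    Ok(())
}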
|
@ -293,12 +293,18 @@ impl<'a> SearchBuilder<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Serialize, Deserialize)]
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub struct MatchPosition {
|
||||
pub start: usize,
|
||||
pub length: usize,
|
||||
}
|
||||
|
||||
impl PartialOrd for MatchPosition {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for MatchPosition {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
match self.start.cmp(&other.start) {
|
||||
|
@ -1,6 +1,7 @@
|
||||
pub mod authentication;
|
||||
pub mod meilisearch;
|
||||
pub mod normalize_path;
|
||||
pub mod compression;
|
||||
|
||||
pub use authentication::Authentication;
|
||||
pub use normalize_path::NormalizePath;
|
||||
|
@ -7,6 +7,8 @@ pub mod models;
|
||||
pub mod option;
|
||||
pub mod routes;
|
||||
pub mod analytics;
|
||||
pub mod snapshot;
|
||||
pub mod dump;
|
||||
|
||||
use actix_http::Error;
|
||||
use actix_service::ServiceFactory;
|
||||
@ -14,7 +16,7 @@ use actix_web::{dev, web, App};
|
||||
use chrono::Utc;
|
||||
use log::error;
|
||||
|
||||
use meilisearch_core::ProcessedUpdateResult;
|
||||
use meilisearch_core::{Index, MainWriter, ProcessedUpdateResult};
|
||||
|
||||
pub use option::Opt;
|
||||
pub use self::data::Data;
|
||||
@ -22,6 +24,7 @@ use self::error::{payload_error_handler, ResponseError};
|
||||
|
||||
pub fn create_app(
|
||||
data: &Data,
|
||||
enable_frontend: bool,
|
||||
) -> App<
|
||||
impl ServiceFactory<
|
||||
Config = (),
|
||||
@ -32,8 +35,8 @@ pub fn create_app(
|
||||
>,
|
||||
actix_http::body::Body,
|
||||
> {
|
||||
App::new()
|
||||
.app_data(web::Data::new(data.clone()))
|
||||
let app = App::new()
|
||||
.data(data.clone())
|
||||
.app_data(
|
||||
web::JsonConfig::default()
|
||||
.limit(data.http_payload_size_limit)
|
||||
@ -44,8 +47,6 @@ pub fn create_app(
|
||||
web::QueryConfig::default()
|
||||
.error_handler(|err, _req| payload_error_handler(err).into())
|
||||
)
|
||||
.service(routes::load_html)
|
||||
.service(routes::load_css)
|
||||
.configure(routes::document::services)
|
||||
.configure(routes::index::services)
|
||||
.configure(routes::search::services)
|
||||
@ -55,6 +56,30 @@ pub fn create_app(
|
||||
.configure(routes::health::services)
|
||||
.configure(routes::stats::services)
|
||||
.configure(routes::key::services)
|
||||
.configure(routes::dump::services);
|
||||
if enable_frontend {
|
||||
app
|
||||
.service(routes::load_html)
|
||||
.service(routes::load_css)
|
||||
} else {
|
||||
app
|
||||
}
|
||||
}
|
||||
|
||||
pub fn index_update_callback_txn(index: Index, index_uid: &str, data: &Data, mut writer: &mut MainWriter) -> Result<(), String> {
|
||||
if let Err(e) = data.db.compute_stats(&mut writer, index_uid) {
|
||||
return Err(format!("Impossible to compute stats; {}", e));
|
||||
}
|
||||
|
||||
if let Err(e) = data.db.set_last_update(&mut writer, &Utc::now()) {
|
||||
return Err(format!("Impossible to update last_update; {}", e));
|
||||
}
|
||||
|
||||
if let Err(e) = index.main.put_updated_at(&mut writer) {
|
||||
return Err(format!("Impossible to update updated_at; {}", e));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
|
||||
@ -62,20 +87,13 @@ pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpda
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(index) = data.db.open_index(&index_uid) {
|
||||
if let Some(index) = data.db.open_index(index_uid) {
|
||||
let db = &data.db;
|
||||
let res = db.main_write::<_, _, ResponseError>(|mut writer| {
|
||||
if let Err(e) = data.db.compute_stats(&mut writer, &index_uid) {
|
||||
error!("Impossible to compute stats; {}", e)
|
||||
if let Err(e) = index_update_callback_txn(index, index_uid, data, &mut writer) {
|
||||
error!("{}", e);
|
||||
}
|
||||
|
||||
if let Err(e) = data.db.set_last_update(&mut writer, &Utc::now()) {
|
||||
error!("Impossible to update last_update; {}", e)
|
||||
}
|
||||
|
||||
if let Err(e) = index.main.put_updated_at(&mut writer) {
|
||||
error!("Impossible to update updated_at; {}", e)
|
||||
}
|
||||
Ok(())
|
||||
});
|
||||
match res {
|
||||
|
@ -6,6 +6,7 @@ use main_error::MainError;
|
||||
use meilisearch_http::helpers::NormalizePath;
|
||||
use meilisearch_http::{create_app, index_update_callback, Data, Opt};
|
||||
use structopt::StructOpt;
|
||||
use meilisearch_http::{snapshot, dump};
|
||||
|
||||
mod analytics;
|
||||
|
||||
@ -13,7 +14,7 @@ mod analytics;
|
||||
#[global_allocator]
|
||||
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
|
||||
|
||||
#[actix_rt::main]
|
||||
#[actix_web::main]
|
||||
async fn main() -> Result<(), MainError> {
|
||||
let opt = Opt::from_args();
|
||||
|
||||
@ -51,6 +52,10 @@ async fn main() -> Result<(), MainError> {
|
||||
_ => unreachable!(),
|
||||
}
|
||||
|
||||
if let Some(path) = &opt.import_snapshot {
|
||||
snapshot::load_snapshot(&opt.db_path, path, opt.ignore_snapshot_if_db_exists, opt.ignore_missing_snapshot)?;
|
||||
}
|
||||
|
||||
let data = Data::new(opt.clone())?;
|
||||
|
||||
if !opt.no_analytics {
|
||||
@ -64,10 +69,20 @@ async fn main() -> Result<(), MainError> {
|
||||
index_update_callback(name, &data_cloned, status);
|
||||
}));
|
||||
|
||||
|
||||
if let Some(path) = &opt.import_dump {
|
||||
dump::import_dump(&data, path, opt.dump_batch_size)?;
|
||||
}
|
||||
|
||||
if opt.schedule_snapshot {
|
||||
snapshot::schedule_snapshot(data.clone(), &opt.snapshot_dir, opt.snapshot_interval_sec.unwrap_or(86400))?;
|
||||
}
|
||||
|
||||
print_launch_resume(&opt, &data);
|
||||
|
||||
let enable_frontend = opt.env != "production";
|
||||
let http_server = HttpServer::new(move || {
|
||||
create_app(&data)
|
||||
create_app(&data, enable_frontend)
|
||||
.wrap(
|
||||
Cors::new()
|
||||
.send_wildcard()
|
||||
|
@ -1,7 +1,7 @@
|
||||
use std::{error, fs};
|
||||
use std::io::{BufReader, Read};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::{error, fs};
|
||||
|
||||
use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
|
||||
use rustls::{
|
||||
@ -49,12 +49,12 @@ pub struct Opt {
|
||||
pub no_analytics: bool,
|
||||
|
||||
/// The maximum size, in bytes, of the main lmdb database directory
|
||||
#[structopt(long, env = "MEILI_MAIN_MAP_SIZE", default_value = "107374182400")] // 100GB
|
||||
pub main_map_size: usize,
|
||||
#[structopt(long, env = "MEILI_MAX_MDB_SIZE", default_value = "107374182400")] // 100GB
|
||||
pub max_mdb_size: usize,
|
||||
|
||||
/// The maximum size, in bytes, of the update lmdb database directory
|
||||
#[structopt(long, env = "MEILI_UPDATE_MAP_SIZE", default_value = "107374182400")] // 100GB
|
||||
pub update_map_size: usize,
|
||||
#[structopt(long, env = "MEILI_MAX_UDB_SIZE", default_value = "107374182400")] // 100GB
|
||||
pub max_udb_size: usize,
|
||||
|
||||
/// The maximum size, in bytes, of accepted JSON payloads
|
||||
#[structopt(long, env = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT", default_value = "10485760")] // 10MB
|
||||
@ -93,6 +93,44 @@ pub struct Opt {
|
||||
/// SSL support tickets.
|
||||
#[structopt(long, env = "MEILI_SSL_TICKETS")]
|
||||
pub ssl_tickets: bool,
|
||||
|
||||
/// Defines the path of the snapshot file to import.
|
||||
/// This option will, by default, stop the process if a database already exists or if no snapshot exists at
|
||||
/// the given path. If this option is not specified, no snapshot is imported.
|
||||
#[structopt(long)]
|
||||
pub import_snapshot: Option<PathBuf>,
|
||||
|
||||
/// The engine will ignore a missing snapshot and not return an error in such case.
|
||||
#[structopt(long, requires = "import-snapshot")]
|
||||
pub ignore_missing_snapshot: bool,
|
||||
|
||||
/// The engine will skip snapshot importation and not return an error in such case.
|
||||
#[structopt(long, requires = "import-snapshot")]
|
||||
pub ignore_snapshot_if_db_exists: bool,
|
||||
|
||||
/// Defines the directory path where meilisearch will create a snapshot every snapshot_interval_sec seconds.
|
||||
#[structopt(long, env = "MEILI_SNAPSHOT_DIR", default_value = "snapshots/")]
|
||||
pub snapshot_dir: PathBuf,
|
||||
|
||||
/// Activate snapshot scheduling.
|
||||
#[structopt(long, env = "MEILI_SCHEDULE_SNAPSHOT")]
|
||||
pub schedule_snapshot: bool,
|
||||
|
||||
/// Defines time interval, in seconds, between each snapshot creation.
|
||||
#[structopt(long, env = "MEILI_SNAPSHOT_INTERVAL_SEC")]
|
||||
pub snapshot_interval_sec: Option<u64>,
|
||||
|
||||
/// Folder where dumps are created when the dump route is called.
|
||||
#[structopt(long, env = "MEILI_DUMPS_DIR", default_value = "dumps/")]
|
||||
pub dumps_dir: PathBuf,
|
||||
|
||||
/// Import a dump from the specified path, must be a `.tar.gz` file.
|
||||
#[structopt(long, conflicts_with = "import-snapshot")]
|
||||
pub import_dump: Option<PathBuf>,
|
||||
|
||||
/// The batch size used in the importation process; the bigger it is, the faster the dump is created.
|
||||
#[structopt(long, env = "MEILI_DUMP_BATCH_SIZE", default_value = "1024")]
|
||||
pub dump_batch_size: usize,
|
||||
}
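A hedged sketch (not from the diff) of how the new dump flags surface on the command line, assuming every Opt field not shown here has a default or is optional:

// Parsing the new flags in isolation; from_iter_safe avoids panicking on bad input.
use structopt::StructOpt;

fn parse_example() {
    if let Ok(opt) = Opt::from_iter_safe(&[
        "meilisearch",
        "--dumps-dir", "my-dumps/",
        "--dump-batch-size", "4096",
        "--import-dump", "my-dumps/20201015-120000123.dump",
    ]) {
        assert_eq!(opt.dump_batch_size, 4096);
        assert!(opt.import_dump.is_some());
    }
}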
|
||||
|
||||
impl Opt {
|
||||
|
@ -1,11 +1,11 @@
|
||||
use std::collections::{BTreeSet, HashSet};
|
||||
|
||||
use actix_web::{delete, get, post, put};
|
||||
use actix_web::{web, HttpResponse};
|
||||
use actix_web_macros::{delete, get, post, put};
|
||||
use indexmap::IndexMap;
|
||||
use meilisearch_core::update;
|
||||
use serde::Deserialize;
|
||||
use meilisearch_core::{update, MainReader};
|
||||
use serde_json::Value;
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::Data;
|
||||
use crate::error::{Error, ResponseError};
|
||||
@ -45,7 +45,8 @@ async fn get_document(
|
||||
|
||||
let reader = data.db.main_read_txn()?;
|
||||
|
||||
let internal_id = index.main
|
||||
let internal_id = index
|
||||
.main
|
||||
.external_to_internal_docid(&reader, &path.document_id)?
|
||||
.ok_or(Error::document_not_found(&path.document_id))?;
|
||||
|
||||
@ -85,41 +86,61 @@ struct BrowseQuery {
|
||||
attributes_to_retrieve: Option<String>,
|
||||
}
|
||||
|
||||
pub fn get_all_documents_sync(
|
||||
data: &web::Data<Data>,
|
||||
reader: &MainReader,
|
||||
index_uid: &str,
|
||||
offset: usize,
|
||||
limit: usize,
|
||||
attributes_to_retrieve: Option<&String>
|
||||
) -> Result<Vec<Document>, Error> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(index_uid)
|
||||
.ok_or(Error::index_not_found(index_uid))?;
|
||||
|
||||
|
||||
let documents_ids: Result<BTreeSet<_>, _> = index
|
||||
.documents_fields_counts
|
||||
.documents_ids(reader)?
|
||||
.skip(offset)
|
||||
.take(limit)
|
||||
.collect();
|
||||
|
||||
let attributes: Option<HashSet<&str>> = attributes_to_retrieve
|
||||
.map(|a| a.split(',').collect());
|
||||
|
||||
let mut documents = Vec::new();
|
||||
for document_id in documents_ids? {
|
||||
if let Ok(Some(document)) =
|
||||
index.document::<Document>(reader, attributes.as_ref(), document_id)
|
||||
{
|
||||
documents.push(document);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(documents)
|
||||
}
|
||||
|
||||
#[get("/indexes/{index_uid}/documents", wrap = "Authentication::Public")]
|
||||
async fn get_all_documents(
|
||||
data: web::Data<Data>,
|
||||
path: web::Path<IndexParam>,
|
||||
params: web::Query<BrowseQuery>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
|
||||
let offset = params.offset.unwrap_or(0);
|
||||
let limit = params.limit.unwrap_or(20);
|
||||
|
||||
let index_uid = &path.index_uid;
|
||||
let reader = data.db.main_read_txn()?;
|
||||
let documents_ids: Result<BTreeSet<_>, _> = index
|
||||
.documents_fields_counts
|
||||
.documents_ids(&reader)?
|
||||
.skip(offset)
|
||||
.take(limit)
|
||||
.collect();
|
||||
|
||||
let attributes: Option<HashSet<&str>> = params
|
||||
.attributes_to_retrieve
|
||||
.as_ref()
|
||||
.map(|a| a.split(',').collect());
|
||||
|
||||
let mut documents = Vec::new();
|
||||
for document_id in documents_ids? {
|
||||
if let Ok(Some(document)) =
|
||||
index.document::<Document>(&reader, attributes.as_ref(), document_id)
|
||||
{
|
||||
documents.push(document);
|
||||
}
|
||||
}
|
||||
let documents = get_all_documents_sync(
|
||||
&data,
|
||||
&reader,
|
||||
index_uid,
|
||||
offset,
|
||||
limit,
|
||||
params.attributes_to_retrieve.as_ref()
|
||||
)?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(documents))
|
||||
}
|
||||
@ -146,11 +167,7 @@ async fn update_multiple_documents(
|
||||
body: web::Json<Vec<Document>>,
|
||||
is_partial: bool,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
|
||||
let update_id = data.get_or_create_index(&path.index_uid, |index| {
|
||||
let reader = data.db.main_read_txn()?;
|
||||
|
||||
let mut schema = index
|
||||
@ -164,12 +181,10 @@ async fn update_multiple_documents(
|
||||
None => body
|
||||
.first()
|
||||
.and_then(find_primary_key)
|
||||
.ok_or(meilisearch_core::Error::MissingPrimaryKey)?
|
||||
.ok_or(meilisearch_core::Error::MissingPrimaryKey)?,
|
||||
};
|
||||
|
||||
schema
|
||||
.set_primary_key(&id)
|
||||
.map_err(Error::bad_request)?;
|
||||
schema.set_primary_key(&id).map_err(Error::bad_request)?;
|
||||
|
||||
data.db.main_write(|w| index.main.put_schema(w, &schema))?;
|
||||
}
|
||||
@ -184,9 +199,9 @@ async fn update_multiple_documents(
|
||||
document_addition.update_document(document);
|
||||
}
|
||||
|
||||
let update_id = data.db.update_write(|w| document_addition.finalize(w))?;
|
||||
|
||||
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
|
||||
Ok(data.db.update_write(|w| document_addition.finalize(w))?)
|
||||
})?;
|
||||
return Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)));
|
||||
}
|
||||
|
||||
#[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
|
||||
@ -223,7 +238,6 @@ async fn delete_documents(
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
|
||||
|
||||
let mut documents_deletion = index.documents_deletion();
|
||||
|
||||
for document_id in body.into_inner() {
|
||||
|
64
meilisearch-http/src/routes/dump.rs
Normal file
@ -0,0 +1,64 @@
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
|
||||
use actix_web::{get, post};
|
||||
use actix_web::{HttpResponse, web};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::dump::{DumpInfo, DumpStatus, compressed_dumps_dir, init_dump_process};
|
||||
use crate::Data;
|
||||
use crate::error::{Error, ResponseError};
|
||||
use crate::helpers::Authentication;
|
||||
|
||||
pub fn services(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(trigger_dump)
|
||||
.service(get_dump_status);
|
||||
}
|
||||
|
||||
#[post("/dumps", wrap = "Authentication::Private")]
|
||||
async fn trigger_dump(
|
||||
data: web::Data<Data>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let dumps_dir = Path::new(&data.dumps_dir);
|
||||
match init_dump_process(&data, &dumps_dir) {
|
||||
Ok(resume) => Ok(HttpResponse::Accepted().json(resume)),
|
||||
Err(e) => Err(e.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct DumpStatusResponse {
|
||||
status: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct DumpParam {
|
||||
dump_uid: String,
|
||||
}
|
||||
|
||||
#[get("/dumps/{dump_uid}/status", wrap = "Authentication::Private")]
|
||||
async fn get_dump_status(
|
||||
data: web::Data<Data>,
|
||||
path: web::Path<DumpParam>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let dumps_dir = Path::new(&data.dumps_dir);
|
||||
let dump_uid = &path.dump_uid;
|
||||
|
||||
if let Some(resume) = DumpInfo::get_current() {
|
||||
if &resume.uid == dump_uid {
|
||||
return Ok(HttpResponse::Ok().json(resume));
|
||||
}
|
||||
}
|
||||
|
||||
if File::open(compressed_dumps_dir(Path::new(dumps_dir), dump_uid)).is_ok() {
|
||||
let resume = DumpInfo::new(
|
||||
dump_uid.into(),
|
||||
DumpStatus::Done
|
||||
);
|
||||
|
||||
Ok(HttpResponse::Ok().json(resume))
|
||||
} else {
|
||||
Err(Error::not_found("dump does not exist").into())
|
||||
}
|
||||
}
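Illustrative only (the uid is invented): given the serde attributes on DumpInfo and DumpStatus, a finished dump reported by this route serializes to a flat camelCase object with the error field omitted.

let resume = DumpInfo::new("20201015-120000123".to_string(), DumpStatus::Done);
assert_eq!(
    serde_json::to_value(&resume).unwrap(),
    serde_json::json!({ "uid": "20201015-120000123", "status": "done" })
);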
|
@ -1,47 +1,13 @@
|
||||
use actix_web::get;
|
||||
use actix_web::{web, HttpResponse};
|
||||
use actix_web_macros::{get, put};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::error::{Error, ResponseError};
|
||||
use crate::helpers::Authentication;
|
||||
use crate::Data;
|
||||
use crate::error::ResponseError;
|
||||
|
||||
pub fn services(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(get_health).service(change_healthyness);
|
||||
cfg.service(get_health);
|
||||
}
|
||||
|
||||
#[get("/health")]
|
||||
async fn get_health(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
|
||||
let reader = data.db.main_read_txn()?;
|
||||
if let Ok(Some(_)) = data.db.get_health(&reader) {
|
||||
return Err(Error::Maintenance.into());
|
||||
}
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
async fn set_healthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
|
||||
data.db.main_write(|w| data.db.set_healthy(w))?;
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
async fn set_unhealthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
|
||||
data.db.main_write(|w| data.db.set_unhealthy(w))?;
|
||||
Ok(HttpResponse::Ok().finish())
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Clone)]
|
||||
struct HealthBody {
|
||||
health: bool,
|
||||
}
|
||||
|
||||
#[put("/health", wrap = "Authentication::Private")]
|
||||
async fn change_healthyness(
|
||||
data: web::Data<Data>,
|
||||
body: web::Json<HealthBody>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
if body.health {
|
||||
set_healthy(data).await
|
||||
} else {
|
||||
set_unhealthy(data).await
|
||||
}
|
||||
async fn get_health() -> Result<HttpResponse, ResponseError> {
|
||||
Ok(HttpResponse::NoContent().finish())
|
||||
}
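A small sketch of the new behaviour (assumption: nothing else is consulted): the health route now always answers 204 No Content instead of reading the health flag from the main database.

// Hypothetical check of the rewritten handler above.
async fn health_example() {
    let resp = get_health().await.unwrap();
    assert_eq!(resp.status(), actix_web::http::StatusCode::NO_CONTENT);
}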
|
||||
|
@ -1,14 +1,16 @@
|
||||
use actix_web::{delete, get, post, put};
|
||||
use actix_web::{web, HttpResponse};
|
||||
use actix_web_macros::{delete, get, post, put};
|
||||
use chrono::{DateTime, Utc};
|
||||
use log::error;
|
||||
use meilisearch_core::{Database, MainReader, UpdateReader};
|
||||
use meilisearch_core::update::UpdateStatus;
|
||||
use rand::seq::SliceRandom;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::Data;
|
||||
use crate::error::{Error, ResponseError};
|
||||
use crate::helpers::Authentication;
|
||||
use crate::routes::IndexParam;
|
||||
use crate::Data;
|
||||
|
||||
pub fn services(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(list_indexes)
|
||||
@ -29,19 +31,17 @@ fn generate_uid() -> String {
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct IndexResponse {
|
||||
name: String,
|
||||
uid: String,
|
||||
pub struct IndexResponse {
|
||||
pub name: String,
|
||||
pub uid: String,
|
||||
created_at: DateTime<Utc>,
|
||||
updated_at: DateTime<Utc>,
|
||||
primary_key: Option<String>,
|
||||
pub primary_key: Option<String>,
|
||||
}
|
||||
|
||||
#[get("/indexes", wrap = "Authentication::Private")]
|
||||
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
|
||||
let reader = data.db.main_read_txn()?;
|
||||
pub fn list_indexes_sync(data: &web::Data<Data>, reader: &MainReader) -> Result<Vec<IndexResponse>, ResponseError> {
|
||||
let mut indexes = Vec::new();
|
||||
|
||||
for index_uid in data.db.indexes_uids() {
|
||||
@ -49,23 +49,23 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseErr
|
||||
|
||||
match index {
|
||||
Some(index) => {
|
||||
let name = index.main.name(&reader)?.ok_or(Error::internal(
|
||||
let name = index.main.name(reader)?.ok_or(Error::internal(
|
||||
"Impossible to get the name of an index",
|
||||
))?;
|
||||
let created_at = index
|
||||
.main
|
||||
.created_at(&reader)?
|
||||
.created_at(reader)?
|
||||
.ok_or(Error::internal(
|
||||
"Impossible to get the create date of an index",
|
||||
))?;
|
||||
let updated_at = index
|
||||
.main
|
||||
.updated_at(&reader)?
|
||||
.updated_at(reader)?
|
||||
.ok_or(Error::internal(
|
||||
"Impossible to get the last update date of an index",
|
||||
))?;
|
||||
|
||||
let primary_key = match index.main.schema(&reader) {
|
||||
let primary_key = match index.main.schema(reader) {
|
||||
Ok(Some(schema)) => match schema.primary_key() {
|
||||
Some(primary_key) => Some(primary_key.to_owned()),
|
||||
None => None,
|
||||
@ -89,6 +89,14 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseErr
|
||||
}
|
||||
}
|
||||
|
||||
Ok(indexes)
|
||||
}
|
||||
|
||||
#[get("/indexes", wrap = "Authentication::Private")]
|
||||
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
|
||||
let reader = data.db.main_read_txn()?;
|
||||
let indexes = list_indexes_sync(&data, &reader)?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(indexes))
|
||||
}
|
||||
|
||||
@ -145,6 +153,55 @@ struct IndexCreateRequest {
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
|
||||
pub fn create_index_sync(
|
||||
database: &std::sync::Arc<Database>,
|
||||
uid: String,
|
||||
name: String,
|
||||
primary_key: Option<String>,
|
||||
) -> Result<IndexResponse, Error> {
|
||||
|
||||
let created_index = database
|
||||
.create_index(&uid)
|
||||
.map_err(|e| match e {
|
||||
meilisearch_core::Error::IndexAlreadyExists => Error::IndexAlreadyExists(uid.clone()),
|
||||
_ => Error::create_index(e)
|
||||
})?;
|
||||
|
||||
let index_response = database.main_write::<_, _, Error>(|mut write_txn| {
|
||||
created_index.main.put_name(&mut write_txn, &name)?;
|
||||
|
||||
let created_at = created_index
|
||||
.main
|
||||
.created_at(&write_txn)?
|
||||
.ok_or(Error::internal("Impossible to read created at"))?;
|
||||
|
||||
let updated_at = created_index
|
||||
.main
|
||||
.updated_at(&write_txn)?
|
||||
.ok_or(Error::internal("Impossible to read updated at"))?;
|
||||
|
||||
if let Some(id) = primary_key.clone() {
|
||||
if let Some(mut schema) = created_index.main.schema(&write_txn)? {
|
||||
schema
|
||||
.set_primary_key(&id)
|
||||
.map_err(Error::bad_request)?;
|
||||
created_index.main.put_schema(&mut write_txn, &schema)?;
|
||||
}
|
||||
}
|
||||
let index_response = IndexResponse {
|
||||
name,
|
||||
uid,
|
||||
created_at,
|
||||
updated_at,
|
||||
primary_key,
|
||||
};
|
||||
Ok(index_response)
|
||||
})?;
|
||||
|
||||
Ok(index_response)
|
||||
}
|
||||
|
||||
#[post("/indexes", wrap = "Authentication::Private")]
|
||||
async fn create_index(
|
||||
data: web::Data<Data>,
|
||||
@ -175,45 +232,9 @@ async fn create_index(
|
||||
},
|
||||
};
|
||||
|
||||
let created_index = data
|
||||
.db
|
||||
.create_index(&uid)
|
||||
.map_err(|e| match e {
|
||||
meilisearch_core::Error::IndexAlreadyExists => e.into(),
|
||||
_ => ResponseError::from(Error::create_index(e))
|
||||
})?;
|
||||
let name = body.name.as_ref().unwrap_or(&uid).to_string();
|
||||
|
||||
let index_response = data.db.main_write::<_, _, ResponseError>(|mut writer| {
|
||||
let name = body.name.as_ref().unwrap_or(&uid);
|
||||
created_index.main.put_name(&mut writer, name)?;
|
||||
|
||||
let created_at = created_index
|
||||
.main
|
||||
.created_at(&writer)?
|
||||
.ok_or(Error::internal("Impossible to read created at"))?;
|
||||
|
||||
let updated_at = created_index
|
||||
.main
|
||||
.updated_at(&writer)?
|
||||
.ok_or(Error::internal("Impossible to read updated at"))?;
|
||||
|
||||
if let Some(id) = body.primary_key.clone() {
|
||||
if let Some(mut schema) = created_index.main.schema(&writer)? {
|
||||
schema
|
||||
.set_primary_key(&id)
|
||||
.map_err(Error::bad_request)?;
|
||||
created_index.main.put_schema(&mut writer, &schema)?;
|
||||
}
|
||||
}
|
||||
let index_response = IndexResponse {
|
||||
name: name.to_string(),
|
||||
uid,
|
||||
created_at,
|
||||
updated_at,
|
||||
primary_key: body.primary_key.clone(),
|
||||
};
|
||||
Ok(index_response)
|
||||
})?;
|
||||
let index_response = create_index_sync(&data.db, uid, name, body.primary_key.clone())?;
|
||||
|
||||
Ok(HttpResponse::Created().json(index_response))
|
||||
}
|
||||
@ -340,20 +361,28 @@ async fn get_update_status(
|
||||
)).into()),
|
||||
}
|
||||
}
|
||||
pub fn get_all_updates_status_sync(
|
||||
data: &web::Data<Data>,
|
||||
reader: &UpdateReader,
|
||||
index_uid: &str,
|
||||
) -> Result<Vec<UpdateStatus>, Error> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(index_uid)
|
||||
.ok_or(Error::index_not_found(index_uid))?;
|
||||
|
||||
Ok(index.all_updates_status(reader)?)
|
||||
}
|
||||
|
||||
#[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")]
|
||||
async fn get_all_updates_status(
|
||||
data: web::Data<Data>,
|
||||
path: web::Path<IndexParam>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index = data
|
||||
.db
|
||||
.open_index(&path.index_uid)
|
||||
.ok_or(Error::index_not_found(&path.index_uid))?;
|
||||
|
||||
let reader = data.db.update_read_txn()?;
|
||||
|
||||
let response = index.all_updates_status(&reader)?;
|
||||
let response = get_all_updates_status_sync(&data, &reader, &path.index_uid)?;
|
||||
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
use actix_web::web;
|
||||
use actix_web::HttpResponse;
|
||||
use actix_web_macros::get;
|
||||
use actix_web::get;
|
||||
use serde::Serialize;
|
||||
|
||||
use crate::helpers::Authentication;
|
||||
|
@ -10,6 +10,7 @@ pub mod setting;
|
||||
pub mod stats;
|
||||
pub mod stop_words;
|
||||
pub mod synonym;
|
||||
pub mod dump;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct IndexParam {
|
||||
|
@@ -1,9 +1,7 @@
use std::collections::{HashSet, HashMap};
use std::collections::{HashMap, HashSet};

use actix_web::{get, post, web, HttpResponse};
use log::warn;
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::{get, post};
use serde::{Deserialize, Serialize};
use serde_json::Value;

@@ -14,11 +12,10 @@ use crate::routes::IndexParam;
use crate::Data;

use meilisearch_core::facets::FacetFilter;
use meilisearch_schema::{Schema, FieldId};
use meilisearch_schema::{FieldId, Schema};

pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_post)
.service(search_with_url_query);
cfg.service(search_with_post).service(search_with_url_query);
}

#[derive(Serialize, Deserialize)]
@@ -93,7 +90,11 @@ async fn search_with_post(
}

impl SearchQuery {
fn search(&self, index_uid: &str, data: web::Data<Data>) -> Result<SearchResult, ResponseError> {
fn search(
&self,
index_uid: &str,
data: web::Data<Data>,
) -> Result<SearchResult, ResponseError> {
let index = data
.db
.open_index(index_uid)
@@ -105,7 +106,12 @@ impl SearchQuery {
.schema(&reader)?
.ok_or(Error::internal("Impossible to retrieve the schema"))?;

let mut search_builder = index.new_search(self.q.clone());
let query = self
.q
.clone()
.and_then(|q| if q.is_empty() { None } else { Some(q) });

let mut search_builder = index.new_search(query);

if let Some(offset) = self.offset {
search_builder.offset(offset);
@@ -118,7 +124,8 @@ impl SearchQuery {
let mut restricted_attributes: HashSet<&str>;
match &self.attributes_to_retrieve {
Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
let attributes_to_retrieve: HashSet<&str> =
attributes_to_retrieve.split(',').collect();
if attributes_to_retrieve.contains("*") {
restricted_attributes = available_attributes.clone();
} else {
@@ -132,15 +139,22 @@ impl SearchQuery {
}
}
}
},
}
None => {
restricted_attributes = available_attributes.clone();
}
}

if let Some(ref facet_filters) = self.facet_filters {
let attrs = index.main.attributes_for_faceting(&reader)?.unwrap_or_default();
search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?);
let attrs = index
.main
.attributes_for_faceting(&reader)?
.unwrap_or_default();
search_builder.add_facet_filters(FacetFilter::from_str(
facet_filters,
&schema,
&attrs,
)?);
}

if let Some(facets) = &self.facets_distribution {
@@ -148,7 +162,7 @@ impl SearchQuery {
Some(ref attrs) => {
let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
search_builder.add_facets(field_ids);
},
}
None => return Err(FacetCountError::NoFacetSet.into()),
}
}
@@ -160,20 +174,23 @@ impl SearchQuery {
for attribute in attributes_to_crop.split(',') {
let mut attribute = attribute.split(':');
let attr = attribute.next();
let length = attribute.next().and_then(|s| s.parse().ok()).unwrap_or(default_length);
let length = attribute
.next()
.and_then(|s| s.parse().ok())
.unwrap_or(default_length);
match attr {
Some("*") => {
for attr in &restricted_attributes {
final_attributes.insert(attr.to_string(), length);
}
},
}
Some(attr) => {
if available_attributes.contains(attr) {
final_attributes.insert(attr.to_string(), length);
} else {
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
}
},
}
None => (),
}
}
@@ -215,7 +232,11 @@ impl SearchQuery {
///
/// An error is returned if the array is malformed, or if it contains attributes that are
/// unexisting, or not set as facets.
fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) -> Result<Vec<(FieldId, String)>, FacetCountError> {
fn prepare_facet_list(
facets: &str,
schema: &Schema,
facet_attrs: &[FieldId],
) -> Result<Vec<(FieldId, String)>, FacetCountError> {
let json_array = serde_json::from_str(facets)?;
match json_array {
Value::Array(vals) => {
@@ -243,6 +264,6 @@ fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) ->
}
Ok(field_ids)
}
bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"])),
}
}
@@ -1,13 +1,15 @@
use std::collections::{BTreeMap, BTreeSet};

use actix_web::{delete, get, post};
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use meilisearch_core::{MainReader, UpdateWriter};
use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
use meilisearch_schema::Schema;
use std::collections::{BTreeMap, BTreeSet, HashSet};

use crate::Data;
use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data;

pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(update_all)
@@ -30,87 +32,83 @@ pub fn services(cfg: &mut web::ServiceConfig) {
.service(update_attributes_for_faceting);
}

pub fn update_all_settings_txn(
data: &web::Data<Data>,
settings: SettingsUpdate,
index_uid: &str,
write_txn: &mut UpdateWriter,
) -> Result<u64, Error> {
let index = data
.db
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;

let update_id = index.settings_update(write_txn, settings)?;
Ok(update_id)
}

#[post("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn update_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Settings>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.db.update_write::<_, _, ResponseError>(|writer| {
let settings = body
.into_inner()
.to_update()
.map_err(Error::bad_request)?;
let update_id = data.get_or_create_index(&path.index_uid, |index| {
Ok(data.db.update_write::<_, _, ResponseError>(|writer| {
let settings = body.into_inner().to_update().map_err(Error::bad_request)?;
let update_id = index.settings_update(writer, settings)?;
Ok(update_id)
})?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}

#[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn get_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
pub fn get_all_sync(data: &web::Data<Data>, reader: &MainReader, index_uid: &str) -> Result<Settings, Error> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;

let reader = data.db.main_read_txn()?;
let stop_words: BTreeSet<String> = index.main.stop_words(&reader)?.into_iter().collect();

let stop_words: BTreeSet<String> = index
.main
.stop_words(&reader)?
.into_iter()
.collect();

let synonyms_list = index.main.synonyms(&reader)?;
let synonyms_list = index.main.synonyms(reader)?;

let mut synonyms = BTreeMap::new();
let index_synonyms = &index.synonyms;
for synonym in synonyms_list {
let list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
let list = index_synonyms.synonyms(reader, synonym.as_bytes())?;
synonyms.insert(synonym, list);
}

let ranking_rules = index
.main
.ranking_rules(&reader)?
.ranking_rules(reader)?
.unwrap_or(DEFAULT_RANKING_RULES.to_vec())
.into_iter()
.map(|r| r.to_string())
.collect();

let schema = index.main.schema(&reader)?;

let distinct_attribute = match (index.main.distinct_attribute(&reader)?, &schema) {
let distinct_attribute = match (index.main.distinct_attribute(reader)?, &schema) {
(Some(id), Some(schema)) => schema.name(id).map(str::to_string),
_ => None,
};

let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(&reader)?) {
(Some(schema), Some(attrs)) => {
attrs
(Some(schema), Some(attrs)) => attrs
.iter()
.filter_map(|&id| schema.name(id))
.map(str::to_string)
.collect()
}
.collect(),
_ => vec![],
};

let searchable_attributes = schema.as_ref().map(get_indexed_attributes);
let displayed_attributes = schema.as_ref().map(get_displayed_attributes);

let settings = Settings {
Ok(Settings {
ranking_rules: Some(Some(ranking_rules)),
distinct_attribute: Some(distinct_attribute),
searchable_attributes: Some(searchable_attributes),
@@ -118,7 +116,16 @@ async fn get_all(
stop_words: Some(Some(stop_words)),
synonyms: Some(Some(synonyms)),
attributes_for_faceting: Some(Some(attributes_for_faceting)),
};
})
}

#[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn get_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
let settings = get_all_sync(&data, &reader, &path.index_uid)?;

Ok(HttpResponse::Ok().json(settings))
}
@@ -144,7 +151,9 @@ async fn delete_all(
attributes_for_faceting: UpdateState::Clear,
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -183,18 +192,17 @@ async fn update_rules(
path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = Settings {
ranking_rules: Some(body.into_inner()),
..Settings::default()
};

let settings = settings.to_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -217,7 +225,9 @@ async fn delete_rules(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -254,18 +264,17 @@ async fn update_distinct(
path: web::Path<IndexParam>,
body: web::Json<Option<String>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = Settings {
distinct_attribute: Some(body.into_inner()),
..Settings::default()
};

let settings = settings.to_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -288,7 +297,9 @@ async fn delete_distinct(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -307,8 +318,7 @@ async fn get_searchable(
.ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let searchable_attributes: Option<Vec<String>> =
schema.as_ref().map(get_indexed_attributes);
let searchable_attributes: Option<Vec<String>> = schema.as_ref().map(get_indexed_attributes);

Ok(HttpResponse::Ok().json(searchable_attributes))
}
@@ -322,11 +332,7 @@ async fn update_searchable(
path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = Settings {
searchable_attributes: Some(body.into_inner()),
..Settings::default()
@@ -334,7 +340,10 @@ async fn update_searchable(

let settings = settings.to_update().map_err(Error::bad_request)?;

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -357,7 +366,9 @@ async fn delete_searchable(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -390,20 +401,19 @@ async fn get_displayed(
async fn update_displayed(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Option<HashSet<String>>>,
body: web::Json<Option<BTreeSet<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = Settings {
displayed_attributes: Some(body.into_inner()),
..Settings::default()
};

let settings = settings.to_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -426,7 +436,9 @@ async fn delete_displayed(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -444,20 +456,16 @@ async fn get_attributes_for_faceting(
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let attributes_for_faceting = data
.db
.main_read::<_, _, ResponseError>(|reader| {
let attributes_for_faceting = data.db.main_read::<_, _, ResponseError>(|reader| {
let schema = index.main.schema(reader)?;
let attrs = index.main.attributes_for_faceting(reader)?;
let attr_names = match (&schema, &attrs) {
(Some(schema), Some(attrs)) => {
attrs
(Some(schema), Some(attrs)) => attrs
.iter()
.filter_map(|&id| schema.name(id))
.map(str::to_string)
.collect()
}
_ => vec![]
.collect(),
_ => vec![],
};
Ok(attr_names)
})?;
@@ -474,18 +482,17 @@ async fn update_attributes_for_faceting(
path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = Settings {
attributes_for_faceting: Some(body.into_inner()),
..Settings::default()
};

let settings = settings.to_update().map_err(Error::bad_request)?;
let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -508,7 +515,9 @@ async fn delete_attributes_for_faceting(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -517,18 +526,20 @@ fn get_indexed_attributes(schema: &Schema) -> Vec<String> {
if schema.is_indexed_all() {
["*"].iter().map(|s| s.to_string()).collect()
} else {
schema.indexed_name()
schema
.indexed_name()
.iter()
.map(|s| s.to_string())
.collect()
}
}

fn get_displayed_attributes(schema: &Schema) -> HashSet<String> {
fn get_displayed_attributes(schema: &Schema) -> BTreeSet<String> {
if schema.is_displayed_all() {
["*"].iter().map(|s| s.to_string()).collect()
} else {
schema.displayed_name()
schema
.displayed_name()
.iter()
.map(|s| s.to_string())
.collect()
@@ -1,8 +1,8 @@
use std::collections::HashMap;
use std::collections::{HashMap, BTreeMap};

use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use actix_web::get;
use chrono::{DateTime, Utc};
use log::error;
use serde::Serialize;
@@ -24,7 +24,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
struct IndexStatsResponse {
number_of_documents: u64,
is_indexing: bool,
fields_distribution: HashMap<String, usize>,
fields_distribution: BTreeMap<String, usize>,
}

#[get("/indexes/{index_uid}/stats", wrap = "Authentication::Private")]
@@ -1,5 +1,5 @@
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use actix_web::{delete, get, post};
use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use std::collections::BTreeSet;

@@ -39,17 +39,16 @@ async fn update(
path: web::Path<IndexParam>,
body: web::Json<BTreeSet<String>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = SettingsUpdate {
stop_words: UpdateState::Update(body.into_inner()),
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -72,7 +71,9 @@ async fn delete(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -1,7 +1,7 @@
use std::collections::BTreeMap;

use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use actix_web::{delete, get, post};
use indexmap::IndexMap;
use meilisearch_core::settings::{SettingsUpdate, UpdateState};

@@ -50,17 +50,16 @@ async fn update(
path: web::Path<IndexParam>,
body: web::Json<BTreeMap<String, Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;

let update_id = data.get_or_create_index(&path.index_uid, |index| {
let settings = SettingsUpdate {
synonyms: UpdateState::Update(body.into_inner()),
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
Ok(data
.db
.update_write(|w| index.settings_update(w, settings))?)
})?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
@@ -83,7 +82,9 @@ async fn delete(
..SettingsUpdate::default()
};

let update_id = data.db.update_write(|w| index.settings_update(w, settings))?;
let update_id = data
.db
.update_write(|w| index.settings_update(w, settings))?;

Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
meilisearch-http/src/snapshot.rs (new file, 96 lines)
@@ -0,0 +1,96 @@
use crate::Data;
use crate::error::Error;
use crate::helpers::compression;

use log::error;
use std::fs::create_dir_all;
use std::path::Path;
use std::thread;
use std::time::{Duration};
use tempfile::TempDir;

pub fn load_snapshot(
    db_path: &str,
    snapshot_path: &Path,
    ignore_snapshot_if_db_exists: bool,
    ignore_missing_snapshot: bool
) -> Result<(), Error> {
    let db_path = Path::new(db_path);

    if !db_path.exists() && snapshot_path.exists() {
        compression::from_tar_gz(snapshot_path, db_path)
    } else if db_path.exists() && !ignore_snapshot_if_db_exists {
        Err(Error::Internal(format!("database already exists at {:?}, try to delete it or rename it", db_path.canonicalize().unwrap_or(db_path.into()))))
    } else if !snapshot_path.exists() && !ignore_missing_snapshot {
        Err(Error::Internal(format!("snapshot doesn't exist at {:?}", snapshot_path.canonicalize().unwrap_or(snapshot_path.into()))))
    } else {
        Ok(())
    }
}

pub fn create_snapshot(data: &Data, snapshot_path: &Path) -> Result<(), Error> {
    let tmp_dir = TempDir::new()?;

    data.db.copy_and_compact_to_path(tmp_dir.path())?;

    compression::to_tar_gz(tmp_dir.path(), snapshot_path).map_err(|e| Error::Internal(format!("something went wrong during snapshot compression: {}", e)))
}

pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
    if snapshot_dir.file_name().is_none() {
        return Err(Error::Internal("invalid snapshot file path".to_string()));
    }
    let db_name = Path::new(&data.db_path).file_name().ok_or_else(|| Error::Internal("invalid database name".to_string()))?;
    create_dir_all(snapshot_dir)?;
    let snapshot_path = snapshot_dir.join(format!("{}.snapshot", db_name.to_str().unwrap_or("data.ms")));

    thread::spawn(move || loop {
        if let Err(e) = create_snapshot(&data, &snapshot_path) {
            error!("Unsuccessful snapshot creation: {}", e);
        }
        thread::sleep(Duration::from_secs(time_gap_s));
    });

    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::io::prelude::*;
    use std::fs;

    #[test]
    fn test_pack_unpack() {
        let tempdir = TempDir::new().unwrap();

        let test_dir = tempdir.path();
        let src_dir = test_dir.join("src");
        let dest_dir = test_dir.join("complex/destination/path/");
        let archive_path = test_dir.join("archive.snapshot");

        let file_1_relative = Path::new("file1.txt");
        let subdir_relative = Path::new("subdir/");
        let file_2_relative = Path::new("subdir/file2.txt");

        create_dir_all(src_dir.join(subdir_relative)).unwrap();
        fs::File::create(src_dir.join(file_1_relative)).unwrap().write_all(b"Hello_file_1").unwrap();
        fs::File::create(src_dir.join(file_2_relative)).unwrap().write_all(b"Hello_file_2").unwrap();

        assert!(compression::to_tar_gz(&src_dir, &archive_path).is_ok());
        assert!(archive_path.exists());
        assert!(load_snapshot(&dest_dir.to_str().unwrap(), &archive_path, false, false).is_ok());

        assert!(dest_dir.exists());
        assert!(dest_dir.join(file_1_relative).exists());
        assert!(dest_dir.join(subdir_relative).exists());
        assert!(dest_dir.join(file_2_relative).exists());

        let contents = fs::read_to_string(dest_dir.join(file_1_relative)).unwrap();
        assert_eq!(contents, "Hello_file_1");

        let contents = fs::read_to_string(dest_dir.join(file_2_relative)).unwrap();
        assert_eq!(contents, "Hello_file_2");
    }
}
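A minimal sketch of how these snapshot helpers could be wired together at startup. The function name, the paths and the one-hour interval below are assumptions for illustration; only the load_snapshot and schedule_snapshot signatures come from the file above.

use std::path::Path;

// Hypothetical startup wiring (not the actual meilisearch-http main):
// restore the database from an existing snapshot, then keep snapshotting it.
fn init_snapshots(data: Data, db_path: &str, snapshot_dir: &Path) -> Result<(), Error> {
    // Assumed snapshot file name; schedule_snapshot derives its own name from the db path.
    let snapshot_path = snapshot_dir.join("data.ms.snapshot");

    // Import the snapshot only when the database folder does not exist yet;
    // a missing snapshot is tolerated (last flag set to true).
    load_snapshot(db_path, &snapshot_path, false, true)?;

    // Re-create a snapshot in snapshot_dir every 3600 seconds on a background thread.
    schedule_snapshot(data, snapshot_dir, 3600)?;

    Ok(())
}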
meilisearch-http/tests/assets/dumps/v1/metadata.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "indices": [{
    "uid": "test",
    "primaryKey": "id"
  }, {
    "uid": "test2",
    "primaryKey": "test2_id"
  }
  ],
  "dbVersion": "0.13.0",
  "dumpVersion": "1"
}
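For reference, dump metadata shaped like the fixture above could be modeled with serde along these lines. The struct and field names are illustrative assumptions, not the actual meilisearch-http dump types; only the JSON keys come from the fixture.

use serde::Deserialize;

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct DumpIndexMetadata {
    uid: String,
    primary_key: String,
}

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct DumpMetadata {
    indices: Vec<DumpIndexMetadata>,
    db_version: String,
    dump_version: String,
}

// Usage sketch: parse the fixture shown above.
// let metadata: DumpMetadata = serde_json::from_str(&std::fs::read_to_string("metadata.json")?)?;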
meilisearch-http/tests/assets/dumps/v1/test/documents.jsonl (new file, 77 lines)
@@ -0,0 +1,77 @@
{"id":0,"isActive":false,"balance":"$2,668.55","picture":"http://placehold.it/32x32","age":36,"color":"Green","name":"Lucas Hess","gender":"male","email":"lucashess@chorizon.com","phone":"+1 (998) 478-2597","address":"412 Losee Terrace, Blairstown, Georgia, 2825","about":"Mollit ad in exercitation quis. Anim est ut consequat fugiat duis magna aliquip velit nisi. Commodo eiusmod est consequat proident consectetur aliqua enim fugiat. Aliqua adipisicing laboris elit proident enim veniam laboris mollit. Incididunt fugiat minim ad nostrud deserunt tempor in. Id irure officia labore qui est labore nulla nisi. Magna sit quis tempor esse consectetur amet labore duis aliqua consequat.\r\n","registered":"2016-06-21T09:30:25 -02:00","latitude":-44.174957,"longitude":-145.725388,"tags":["bug","bug"]}
{"id":1,"isActive":true,"balance":"$1,706.13","picture":"http://placehold.it/32x32","age":27,"color":"Green","name":"Cherry Orr","gender":"female","email":"cherryorr@chorizon.com","phone":"+1 (995) 479-3174","address":"442 Beverly Road, Ventress, New Mexico, 3361","about":"Exercitation officia mollit proident nostrud ea. Pariatur voluptate labore nostrud magna duis non elit et incididunt Lorem velit duis amet commodo. Irure in velit laboris pariatur. Do tempor ex deserunt duis minim amet.\r\n","registered":"2020-03-18T11:12:21 -01:00","latitude":-24.356932,"longitude":27.184808,"tags":["new issue","bug"]}
{"id":2,"isActive":true,"balance":"$2,467.47","picture":"http://placehold.it/32x32","age":34,"color":"blue","name":"Patricia Goff","gender":"female","email":"patriciagoff@chorizon.com","phone":"+1 (864) 463-2277","address":"866 Hornell Loop, Cresaptown, Ohio, 1700","about":"Non culpa duis dolore Lorem aliqua. Labore veniam laborum cupidatat nostrud ea exercitation. Esse nostrud sit veniam laborum minim ullamco nulla aliqua est cillum magna. Duis non esse excepteur veniam voluptate sunt cupidatat nostrud consequat sint adipisicing ut excepteur. Incididunt sit aliquip non id magna amet deserunt esse quis dolor.\r\n","registered":"2014-10-28T12:59:30 -01:00","latitude":-64.008555,"longitude":11.867098,"tags":["good first issue"]}
{"id":3,"isActive":true,"balance":"$3,344.40","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Adeline Flynn","gender":"female","email":"adelineflynn@chorizon.com","phone":"+1 (994) 600-2840","address":"428 Paerdegat Avenue, Hollymead, Pennsylvania, 948","about":"Ex velit magna minim labore dolor id laborum incididunt. Proident dolor fugiat exercitation ad adipisicing amet dolore. Veniam nisi pariatur aute eu amet sint elit duis exercitation. Eu fugiat Lorem nostrud consequat aute sunt. Minim excepteur cillum laboris enim tempor adipisicing nulla reprehenderit ea velit Lorem qui in incididunt. Esse ipsum mollit deserunt ea exercitation ex aliqua anim magna cupidatat culpa.\r\n","registered":"2014-03-27T06:24:45 -01:00","latitude":-74.485173,"longitude":-11.059859,"tags":["bug","good first issue","wontfix","new issue"]}
{"id":4,"isActive":false,"balance":"$2,575.78","picture":"http://placehold.it/32x32","age":39,"color":"Green","name":"Mariana Pacheco","gender":"female","email":"marianapacheco@chorizon.com","phone":"+1 (820) 414-2223","address":"664 Rapelye Street, Faywood, California, 7320","about":"Sint cillum enim eu Lorem dolore. Est excepteur cillum consequat incididunt. Ut consectetur et do culpa eiusmod ex ut id proident aliqua. Sunt dolor anim minim labore incididunt deserunt enim velit sunt ut in velit. Nulla ipsum cillum qui est minim officia in occaecat exercitation Lorem sunt. Aliqua minim excepteur tempor incididunt dolore. Quis amet ullamco et proident aliqua magna consequat.\r\n","registered":"2015-09-02T03:23:35 -02:00","latitude":75.763501,"longitude":-78.777124,"tags":["new issue"]}
{"id":5,"isActive":true,"balance":"$3,793.09","picture":"http://placehold.it/32x32","age":20,"color":"Green","name":"Warren Watson","gender":"male","email":"warrenwatson@chorizon.com","phone":"+1 (807) 583-2427","address":"671 Prince Street, Faxon, Connecticut, 4275","about":"Cillum incididunt mollit labore ipsum elit ea. Lorem labore consectetur nulla ea fugiat sint esse cillum ea commodo id qui. Sint cillum mollit dolore enim quis esse. Nisi labore duis dolor tempor laborum laboris ad minim pariatur in excepteur sit. Aliqua anim amet sunt ullamco labore amet culpa irure esse eiusmod deserunt consequat Lorem nostrud.\r\n","registered":"2017-06-04T06:02:17 -02:00","latitude":29.979223,"longitude":25.358943,"tags":["wontfix","wontfix","wontfix"]}
{"id":6,"isActive":true,"balance":"$2,919.70","picture":"http://placehold.it/32x32","age":20,"color":"blue","name":"Shelia Berry","gender":"female","email":"sheliaberry@chorizon.com","phone":"+1 (853) 511-2651","address":"437 Forrest Street, Coventry, Illinois, 2056","about":"Id occaecat qui voluptate proident culpa cillum nisi reprehenderit. Pariatur nostrud proident adipisicing reprehenderit eiusmod qui minim proident aliqua id cupidatat laboris deserunt. Proident sint laboris sit mollit dolor qui incididunt quis veniam cillum cupidatat ad nostrud ut. Aliquip consequat eiusmod eiusmod irure tempor do incididunt id culpa laboris eiusmod.\r\n","registered":"2018-07-11T02:45:01 -02:00","latitude":54.815991,"longitude":-118.690609,"tags":["good first issue","bug","wontfix","new issue"]}
{"id":7,"isActive":true,"balance":"$1,349.50","picture":"http://placehold.it/32x32","age":28,"color":"Green","name":"Chrystal Boyd","gender":"female","email":"chrystalboyd@chorizon.com","phone":"+1 (936) 563-2802","address":"670 Croton Loop, Sussex, Florida, 4692","about":"Consequat ex voluptate consectetur laborum nulla. Qui voluptate Lorem amet labore est esse sunt. Nulla cupidatat consequat quis incididunt exercitation aliquip reprehenderit ea ea adipisicing reprehenderit id consectetur quis. Exercitation est incididunt ullamco non proident consequat. Nisi veniam aliquip fugiat voluptate ex id aute duis ullamco magna ipsum ad laborum ipsum. Cupidatat velit dolore esse nisi.\r\n","registered":"2016-11-01T07:36:04 -01:00","latitude":-24.711933,"longitude":147.246705,"tags":[]}
{"id":8,"isActive":false,"balance":"$3,999.56","picture":"http://placehold.it/32x32","age":30,"color":"brown","name":"Martin Porter","gender":"male","email":"martinporter@chorizon.com","phone":"+1 (895) 580-2304","address":"577 Regent Place, Aguila, Guam, 6554","about":"Nostrud nulla labore ex excepteur labore enim cillum pariatur in do Lorem eiusmod ullamco est. Labore aliquip id ut nisi commodo pariatur ea esse laboris. Incididunt eu dolor esse excepteur nulla minim proident non cillum nisi dolore incididunt ipsum tempor.\r\n","registered":"2014-09-20T02:08:30 -02:00","latitude":-88.344273,"longitude":37.964466,"tags":[]}
{"id":9,"isActive":true,"balance":"$3,729.71","picture":"http://placehold.it/32x32","age":26,"color":"blue","name":"Kelli Mendez","gender":"female","email":"kellimendez@chorizon.com","phone":"+1 (936) 401-2236","address":"242 Caton Place, Grazierville, Alabama, 3968","about":"Consectetur occaecat dolore esse eiusmod enim ea aliqua eiusmod amet velit laborum. Velit quis consequat consectetur velit fugiat labore commodo amet do. Magna minim est ad commodo consequat fugiat. Laboris duis Lorem ipsum irure sit ipsum consequat tempor sit. Est ad nulla duis quis velit anim id nulla. Cupidatat ea esse laboris eu veniam cupidatat proident veniam quis.\r\n","registered":"2018-05-04T10:35:30 -02:00","latitude":49.37551,"longitude":41.872323,"tags":["new issue","new issue"]}
{"id":10,"isActive":false,"balance":"$1,127.47","picture":"http://placehold.it/32x32","age":27,"color":"blue","name":"Maddox Johns","gender":"male","email":"maddoxjohns@chorizon.com","phone":"+1 (892) 470-2357","address":"756 Beard Street, Avalon, Louisiana, 114","about":"Voluptate et dolor magna do do. Id do enim ut nulla esse culpa fugiat excepteur quis. Nostrud ad aliquip aliqua qui esse ut consequat proident deserunt esse cupidatat do elit fugiat. Sint cillum aliquip cillum laboris laborum laboris ad aliquip enim reprehenderit cillum eu sint. Sint ut ad duis do culpa non eiusmod amet non ipsum commodo. Pariatur aliquip sit deserunt non. Ut consequat pariatur deserunt veniam est sit eiusmod officia aliquip commodo sunt in eu duis.\r\n","registered":"2016-04-22T06:41:25 -02:00","latitude":66.640229,"longitude":-17.222666,"tags":["new issue","good first issue","good first issue","new issue"]}
{"id":11,"isActive":true,"balance":"$1,351.43","picture":"http://placehold.it/32x32","age":28,"color":"Green","name":"Evans Wagner","gender":"male","email":"evanswagner@chorizon.com","phone":"+1 (889) 496-2332","address":"118 Monaco Place, Lutsen, Delaware, 6209","about":"Sunt consectetur enim ipsum consectetur occaecat reprehenderit nulla pariatur. Cupidatat do exercitation tempor voluptate duis nostrud dolor consectetur. Excepteur aliquip Lorem voluptate cillum est. Nisi velit nulla nostrud ea id officia laboris et.\r\n","registered":"2016-10-27T01:26:31 -02:00","latitude":-77.673222,"longitude":-142.657214,"tags":["good first issue","good first issue"]}
{"id":12,"isActive":false,"balance":"$3,394.96","picture":"http://placehold.it/32x32","age":25,"color":"blue","name":"Aida Kirby","gender":"female","email":"aidakirby@chorizon.com","phone":"+1 (942) 532-2325","address":"797 Engert Avenue, Wilsonia, Idaho, 6532","about":"Mollit aute esse Lorem do laboris anim reprehenderit excepteur. Ipsum culpa esse voluptate officia cupidatat minim. Velit officia proident nostrud sunt irure labore. Culpa ex commodo amet dolor amet voluptate Lorem ex esse commodo fugiat quis non. Ex est adipisicing veniam sunt dolore ut aliqua nisi ex sit. Esse voluptate esse anim id adipisicing enim aute ea exercitation tempor cillum.\r\n","registered":"2018-06-18T04:39:57 -02:00","latitude":-58.062041,"longitude":34.999254,"tags":["new issue","wontfix","bug","new issue"]}
{"id":13,"isActive":true,"balance":"$2,812.62","picture":"http://placehold.it/32x32","age":40,"color":"blue","name":"Nelda Burris","gender":"female","email":"neldaburris@chorizon.com","phone":"+1 (813) 600-2576","address":"160 Opal Court, Fowlerville, Tennessee, 2170","about":"Ipsum aliquip adipisicing elit magna. Veniam irure quis laborum laborum sint velit amet. Irure non eiusmod laborum fugiat qui quis Lorem culpa veniam commodo. Fugiat cupidatat dolore et consequat pariatur enim ex velit consequat deserunt quis. Deserunt et quis laborum cupidatat cillum minim cupidatat nisi do commodo commodo labore cupidatat ea. In excepteur sit nostrud nulla nostrud dolor sint. Et anim culpa aliquip laborum Lorem elit.\r\n","registered":"2015-08-15T12:39:53 -02:00","latitude":66.6871,"longitude":179.549488,"tags":["wontfix"]}
{"id":14,"isActive":true,"balance":"$1,718.33","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Jennifer Hart","gender":"female","email":"jenniferhart@chorizon.com","phone":"+1 (850) 537-2513","address":"124 Veranda Place, Nash, Utah, 985","about":"Amet amet voluptate in occaecat pariatur. Nulla ipsum esse quis qui in quis qui. Non est non nisi qui tempor commodo consequat fugiat. Sint eu ipsum aute anim anim. Ea nostrud excepteur exercitation consectetur Lorem.\r\n","registered":"2016-09-04T11:46:59 -02:00","latitude":-66.827751,"longitude":99.220079,"tags":["wontfix","bug","new issue","new issue"]}
{"id":15,"isActive":false,"balance":"$2,698.16","picture":"http://placehold.it/32x32","age":28,"color":"blue","name":"Aurelia Contreras","gender":"female","email":"aureliacontreras@chorizon.com","phone":"+1 (932) 442-3103","address":"655 Dwight Street, Grapeview, Palau, 8356","about":"Qui adipisicing consectetur aute veniam culpa ipsum. Occaecat occaecat ut mollit enim enim elit Lorem nostrud Lorem. Consequat laborum mollit nulla aute cillum sunt mollit commodo velit culpa. Pariatur pariatur velit nostrud tempor. In minim enim cillum exercitation in laboris labore ea sunt in incididunt fugiat.\r\n","registered":"2014-09-11T10:43:15 -02:00","latitude":-71.328973,"longitude":133.404895,"tags":["wontfix","bug","good first issue"]}
{"id":16,"isActive":true,"balance":"$3,303.25","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Estella Bass","gender":"female","email":"estellabass@chorizon.com","phone":"+1 (825) 436-2909","address":"435 Rockwell Place, Garberville, Wisconsin, 2230","about":"Sit eiusmod mollit velit non. Qui ea in exercitation elit reprehenderit occaecat tempor minim officia. Culpa amet voluptate sit eiusmod pariatur.\r\n","registered":"2017-11-23T09:32:09 -01:00","latitude":81.17014,"longitude":-145.262693,"tags":["new issue"]}
{"id":17,"isActive":false,"balance":"$3,579.20","picture":"http://placehold.it/32x32","age":25,"color":"brown","name":"Ortega Brennan","gender":"male","email":"ortegabrennan@chorizon.com","phone":"+1 (906) 526-2287","address":"440 Berry Street, Rivera, Maine, 1849","about":"Veniam velit non laboris consectetur sit aliquip enim proident velit in ipsum reprehenderit reprehenderit. Dolor qui nulla adipisicing ad magna dolore do ut duis et aute est. Qui est elit cupidatat nostrud. Laboris voluptate reprehenderit minim sint exercitation cupidatat ipsum sint consectetur velit sunt et officia incididunt. Ut amet Lorem minim deserunt officia officia irure qui et Lorem deserunt culpa sit.\r\n","registered":"2016-03-31T02:17:13 -02:00","latitude":-68.407524,"longitude":-113.642067,"tags":["new issue","wontfix"]}
{"id":18,"isActive":false,"balance":"$1,484.92","picture":"http://placehold.it/32x32","age":39,"color":"blue","name":"Leonard Tillman","gender":"male","email":"leonardtillman@chorizon.com","phone":"+1 (864) 541-3456","address":"985 Provost Street, Charco, New Hampshire, 8632","about":"Consectetur ut magna sit id officia nostrud ipsum. Lorem cupidatat laborum nostrud aliquip magna qui est cupidatat exercitation et. Officia qui magna commodo id cillum magna ut ad veniam sunt sint ex. Id minim do in do exercitation aliquip incididunt ex esse. Nisi aliqua quis excepteur qui aute excepteur dolore eu pariatur irure id eu cupidatat eiusmod. Aliqua amet et dolore enim et eiusmod qui irure pariatur qui officia adipisicing nulla duis.\r\n","registered":"2018-05-06T08:21:27 -02:00","latitude":-8.581801,"longitude":-61.910062,"tags":["wontfix","new issue","bug","bug"]}
{"id":19,"isActive":true,"balance":"$3,572.55","picture":"http://placehold.it/32x32","age":33,"color":"brown","name":"Dale Payne","gender":"male","email":"dalepayne@chorizon.com","phone":"+1 (814) 469-3499","address":"536 Dare Court, Ironton, Arkansas, 8605","about":"Et velit cupidatat velit incididunt mollit. Occaecat do labore aliqua dolore excepteur occaecat ut veniam ad ullamco tempor. Ut anim laboris deserunt culpa esse. Pariatur Lorem nulla cillum cupidatat nostrud Lorem commodo reprehenderit ut est. In dolor cillum reprehenderit laboris incididunt ad reprehenderit aute ipsum officia id in consequat. Culpa exercitation voluptate fugiat est Lorem ipsum in dolore dolor consequat Lorem et.\r\n","registered":"2019-10-11T01:01:33 -02:00","latitude":-18.280968,"longitude":-126.091797,"tags":["bug","wontfix","wontfix","wontfix"]}
{"id":20,"isActive":true,"balance":"$1,986.48","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Florence Long","gender":"female","email":"florencelong@chorizon.com","phone":"+1 (972) 557-3858","address":"519 Hendrickson Street, Templeton, Hawaii, 2389","about":"Quis officia occaecat veniam veniam. Ex minim enim labore cupidatat qui. Proident esse deserunt laborum laboris sunt nostrud.\r\n","registered":"2016-05-02T09:18:59 -02:00","latitude":-27.110866,"longitude":-45.09445,"tags":[]}
{"id":21,"isActive":true,"balance":"$1,440.09","picture":"http://placehold.it/32x32","age":40,"color":"blue","name":"Levy Whitley","gender":"male","email":"levywhitley@chorizon.com","phone":"+1 (911) 458-2411","address":"187 Thomas Street, Hachita, North Carolina, 2989","about":"Velit laboris non minim elit sint deserunt fugiat. Aute minim ex commodo aute cillum aliquip fugiat pariatur nulla eiusmod pariatur consectetur. Qui ex ea qui laborum veniam adipisicing magna minim ut. In irure anim voluptate mollit et. Adipisicing labore ea mollit magna aliqua culpa velit est. Excepteur nisi veniam enim velit in ad officia irure laboris.\r\n","registered":"2014-04-30T07:31:38 -02:00","latitude":-6.537315,"longitude":171.813536,"tags":["bug"]}
{"id":22,"isActive":false,"balance":"$2,938.57","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Bernard Mcfarland","gender":"male","email":"bernardmcfarland@chorizon.com","phone":"+1 (979) 442-3386","address":"409 Hall Street, Keyport, Federated States Of Micronesia, 7011","about":"Reprehenderit irure aute et anim ullamco enim est tempor id ipsum mollit veniam aute ullamco. Consectetur dolor velit tempor est reprehenderit ut id non est ullamco voluptate. Commodo aute ullamco culpa non voluptate incididunt non culpa culpa nisi id proident cupidatat.\r\n","registered":"2017-08-10T10:07:59 -02:00","latitude":63.766795,"longitude":68.177069,"tags":[]}
{"id":23,"isActive":true,"balance":"$1,678.49","picture":"http://placehold.it/32x32","age":31,"color":"brown","name":"Blanca Mcclain","gender":"female","email":"blancamcclain@chorizon.com","phone":"+1 (976) 439-2772","address":"176 Crooke Avenue, Valle, Virginia, 5373","about":"Aliquip sunt irure ut consectetur elit. Cillum amet incididunt et anim elit in incididunt adipisicing fugiat veniam esse veniam. Nisi qui sit occaecat tempor nostrud est aute cillum anim excepteur laboris magna in. Fugiat fugiat veniam cillum laborum ut pariatur amet nulla nulla. Nostrud mollit in laborum minim exercitation aute. Lorem aute ipsum laboris est adipisicing qui ullamco tempor adipisicing cupidatat mollit.\r\n","registered":"2015-10-12T11:57:28 -02:00","latitude":-8.944564,"longitude":-150.711709,"tags":["bug","wontfix","good first issue"]}
{"id":24,"isActive":true,"balance":"$2,276.87","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Espinoza Ford","gender":"male","email":"espinozaford@chorizon.com","phone":"+1 (945) 429-3975","address":"137 Bowery Street, Itmann, District Of Columbia, 1864","about":"Deserunt nisi aliquip esse occaecat laborum qui aliqua excepteur ea cupidatat dolore magna consequat. Culpa aliquip cillum incididunt proident est officia consequat duis. Elit tempor ut cupidatat nisi ea sint non labore aliquip amet. Deserunt labore cupidatat laboris dolor duis occaecat velit aliquip reprehenderit esse. Sit ad qui consectetur id anim nisi amet eiusmod.\r\n","registered":"2014-03-26T02:16:08 -01:00","latitude":-37.137666,"longitude":-51.811757,"tags":["wontfix","bug"]}
{"id":25,"isActive":true,"balance":"$3,973.43","picture":"http://placehold.it/32x32","age":29,"color":"Green","name":"Sykes Conley","gender":"male","email":"sykesconley@chorizon.com","phone":"+1 (851) 401-3916","address":"345 Grand Street, Woodlands, Missouri, 4461","about":"Pariatur ullamco duis reprehenderit ad sit dolore. Dolore ex fugiat labore incididunt nostrud. Minim deserunt officia sunt enim magna elit veniam reprehenderit nisi cupidatat dolor eiusmod. Veniam laboris sint cillum et laboris nostrud culpa laboris anim. Incididunt velit pariatur cupidatat sit dolore in. Voluptate consectetur officia id nostrud velit mollit dolor. Id laboris consectetur culpa sunt pariatur minim sunt laboris sit.\r\n","registered":"2015-09-12T06:03:56 -02:00","latitude":67.282955,"longitude":-64.341323,"tags":["wontfix"]}
{"id":26,"isActive":false,"balance":"$1,431.50","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Barlow Duran","gender":"male","email":"barlowduran@chorizon.com","phone":"+1 (995) 436-2562","address":"481 Everett Avenue, Allison, Nebraska, 3065","about":"Proident quis eu officia adipisicing aliquip. Lorem laborum magna dolor et incididunt cillum excepteur et amet. Veniam consectetur officia fugiat magna consequat dolore elit aute exercitation fugiat excepteur ullamco. Sit qui proident reprehenderit ea ad qui culpa exercitation reprehenderit anim cupidatat. Nulla et duis Lorem cillum duis pariatur amet voluptate labore ut aliqua mollit anim ea. Nostrud incididunt et proident adipisicing non consequat tempor ullamco adipisicing incididunt. Incididunt cupidatat tempor fugiat officia qui eiusmod reprehenderit.\r\n","registered":"2017-06-29T04:28:43 -02:00","latitude":-38.70606,"longitude":55.02816,"tags":["new issue"]}
{"id":27,"isActive":true,"balance":"$3,478.27","picture":"http://placehold.it/32x32","age":31,"color":"blue","name":"Schwartz Morgan","gender":"male","email":"schwartzmorgan@chorizon.com","phone":"+1 (861) 507-2067","address":"451 Lincoln Road, Fairlee, Washington, 2717","about":"Labore eiusmod sint dolore sunt eiusmod esse et in id aliquip. Aliqua consequat occaecat laborum labore ipsum enim non nostrud adipisicing adipisicing cillum occaecat. Duis minim est culpa sunt nulla ullamco adipisicing magna irure. Occaecat quis irure eiusmod fugiat quis commodo reprehenderit labore cillum commodo id et.\r\n","registered":"2016-05-10T08:34:54 -02:00","latitude":-75.886403,"longitude":93.044471,"tags":["bug","bug","wontfix","wontfix"]}
{"id":28,"isActive":true,"balance":"$2,825.59","picture":"http://placehold.it/32x32","age":32,"color":"blue","name":"Kristy Leon","gender":"female","email":"kristyleon@chorizon.com","phone":"+1 (948) 465-2563","address":"594 Macon Street, Floris, South Dakota, 3565","about":"Proident veniam voluptate magna id do. Laboris enim dolor culpa quis. Esse voluptate elit commodo duis incididunt velit aliqua. Qui aute commodo incididunt elit eu Lorem dolore. Non esse duis do reprehenderit culpa minim. Ullamco consequat id do exercitation exercitation mollit ipsum velit eiusmod quis.\r\n","registered":"2014-12-14T04:10:29 -01:00","latitude":-50.01615,"longitude":-68.908804,"tags":["wontfix","good first issue"]}
{"id":29,"isActive":false,"balance":"$3,028.03","picture":"http://placehold.it/32x32","age":39,"color":"blue","name":"Ashley Pittman","gender":"male","email":"ashleypittman@chorizon.com","phone":"+1 (928) 507-3523","address":"646 Adelphi Street, Clara, Colorado, 6056","about":"Incididunt cillum consectetur nulla sit sit labore nulla sit. Ullamco nisi mollit reprehenderit tempor irure in Lorem duis. Sunt eu aute laboris dolore commodo ipsum sint cupidatat veniam amet culpa incididunt aute ad. Quis dolore aliquip id aute mollit eiusmod nisi ipsum ut labore adipisicing do culpa.\r\n","registered":"2016-01-07T10:40:48 -01:00","latitude":-58.766037,"longitude":-124.828485,"tags":["wontfix"]}
{"id":30,"isActive":true,"balance":"$2,021.11","picture":"http://placehold.it/32x32","age":32,"color":"blue","name":"Stacy Espinoza","gender":"female","email":"stacyespinoza@chorizon.com","phone":"+1 (999) 487-3253","address":"931 Alabama Avenue, Bangor, Alaska, 8215","about":"Id reprehenderit cupidatat exercitation anim ad nisi irure. Minim est proident mollit laborum. Duis ad duis eiusmod quis.\r\n","registered":"2014-07-16T06:15:53 -02:00","latitude":41.560197,"longitude":177.697,"tags":["new issue","new issue","bug"]}
{"id":31,"isActive":false,"balance":"$3,609.82","picture":"http://placehold.it/32x32","age":32,"color":"blue","name":"Vilma Garza","gender":"female","email":"vilmagarza@chorizon.com","phone":"+1 (944) 585-2021","address":"565 Tech Place, Sedley, Puerto Rico, 858","about":"Excepteur et fugiat mollit incididunt cupidatat. Mollit nisi veniam sint eu exercitation amet labore. Voluptate est magna est amet qui minim excepteur cupidatat dolor quis id excepteur aliqua reprehenderit. Proident nostrud ex veniam officia nisi enim occaecat ex magna officia id consectetur ad eu. In et est reprehenderit cupidatat ad minim veniam proident nulla elit nisi veniam proident ex. Eu in irure sit veniam amet incididunt fugiat proident quis ullamco laboris.\r\n","registered":"2017-06-30T07:43:52 -02:00","latitude":-12.574889,"longitude":-54.771186,"tags":["new issue","wontfix","wontfix"]}
{"id":32,"isActive":false,"balance":"$2,882.34","picture":"http://placehold.it/32x32","age":38,"color":"brown","name":"June Dunlap","gender":"female","email":"junedunlap@chorizon.com","phone":"+1 (997) 504-2937","address":"353 Cozine Avenue, Goodville, Indiana, 1438","about":"Non dolore ut Lorem dolore amet veniam fugiat reprehenderit ut amet ea ut. Non aliquip cillum ad occaecat non et sint quis proident velit laborum ullamco et. Quis qui tempor eu voluptate et proident duis est commodo laboris ex enim. Nisi aliquip laboris nostrud veniam aliqua ullamco. Et officia proident dolor aliqua incididunt veniam proident.\r\n","registered":"2016-08-23T08:54:11 -02:00","latitude":-27.883363,"longitude":-163.919683,"tags":["new issue","new issue","bug","wontfix"]}
{"id":33,"isActive":true,"balance":"$3,556.54","picture":"http://placehold.it/32x32","age":33,"color":"brown","name":"Cecilia Greer","gender":"female","email":"ceciliagreer@chorizon.com","phone":"+1 (977) 573-3498","address":"696 Withers Street, Lydia, Oklahoma, 3220","about":"Dolor pariatur veniam ad enim eiusmod fugiat ullamco nulla veniam. Dolore dolor sit excepteur veniam adipisicing adipisicing excepteur commodo qui reprehenderit magna exercitation enim reprehenderit. Cupidatat eu ullamco excepteur sint do. Et cupidatat ex adipisicing veniam eu tempor reprehenderit ut eiusmod amet proident veniam nostrud. Tempor ex enim mollit laboris magna tempor. Et aliqua nostrud esse pariatur quis. Ut pariatur ea ipsum pariatur.\r\n","registered":"2017-01-13T11:30:12 -01:00","latitude":60.467215,"longitude":84.684575,"tags":["wontfix","good first issue","good first issue","wontfix"]}
{"id":34,"isActive":true,"balance":"$1,413.35","picture":"http://placehold.it/32x32","age":33,"color":"brown","name":"Mckay Schroeder","gender":"male","email":"mckayschroeder@chorizon.com","phone":"+1 (816) 480-3657","address":"958 Miami Court, Rehrersburg, Northern Mariana Islands, 567","about":"Amet do velit excepteur tempor sit eu voluptate. Excepteur amet culpa ipsum in pariatur mollit amet nisi veniam. Laboris elit consectetur id anim qui laboris. Reprehenderit mollit laboris occaecat esse sunt Lorem Lorem sunt occaecat.\r\n","registered":"2016-02-08T04:50:15 -01:00","latitude":-72.413287,"longitude":-159.254371,"tags":["good first issue"]}
{"id":35,"isActive":true,"balance":"$2,306.53","picture":"http://placehold.it/32x32","age":34,"color":"blue","name":"Sawyer Mccormick","gender":"male","email":"sawyermccormick@chorizon.com","phone":"+1 (829) 569-3012","address":"749 Apollo Street, Eastvale, Texas, 7373","about":"Est irure ex occaecat aute. Lorem ad ullamco esse cillum deserunt qui proident anim officia dolore. Incididunt tempor cupidatat nulla cupidatat ullamco reprehenderit Lorem. Laboris tempor do pariatur sint non officia id qui deserunt amet Lorem pariatur consectetur exercitation. Adipisicing reprehenderit pariatur duis ex cupidatat cillum ad laboris ex. Sunt voluptate pariatur esse amet dolore minim aliquip reprehenderit nisi velit mollit.\r\n","registered":"2019-11-30T11:53:23 -01:00","latitude":-48.978194,"longitude":110.950191,"tags":["good first issue","new issue","new issue","bug"]}
{"id":36,"isActive":false,"balance":"$1,844.54","picture":"http://placehold.it/32x32","age":37,"color":"brown","name":"Barbra Valenzuela","gender":"female","email":"barbravalenzuela@chorizon.com","phone":"+1 (992) 512-2649","address":"617 Schenck Court, Reinerton, Michigan, 2908","about":"Deserunt adipisicing nisi et amet aliqua amet. Veniam occaecat et elit excepteur veniam. Aute irure culpa nostrud occaecat. Excepteur sit aute mollit commodo. Do ex pariatur consequat sint Lorem veniam laborum excepteur. Non voluptate ex laborum enim irure. Adipisicing excepteur anim elit esse.\r\n","registered":"2019-03-29T01:59:31 -01:00","latitude":45.193723,"longitude":-12.486778,"tags":["new issue","new issue","wontfix","wontfix"]}
{"id":37,"isActive":false,"balance":"$3,469.82","picture":"http://placehold.it/32x32","age":39,"color":"brown","name":"Opal Weiss","gender":"female","email":"opalweiss@chorizon.com","phone":"+1 (809) 400-3079","address":"535 Bogart Street, Frizzleburg, Arizona, 5222","about":"Reprehenderit nostrud minim adipisicing voluptate nisi consequat id sint. Proident tempor est esse cupidatat minim irure esse do do sint dolor. In officia duis et voluptate Lorem minim cupidatat ipsum enim qui dolor quis in Lorem. Aliquip commodo ex quis exercitation reprehenderit. Lorem id reprehenderit cillum adipisicing sunt ipsum incididunt incididunt.\r\n","registered":"2019-09-04T07:22:28 -02:00","latitude":72.50376,"longitude":61.656435,"tags":["bug","bug","good first issue","good first issue"]}
{"id":38,"isActive":true,"balance":"$1,992.38","picture":"http://placehold.it/32x32","age":40,"color":"Green","name":"Christina Short","gender":"female","email":"christinashort@chorizon.com","phone":"+1 (884) 589-2705","address":"594 Willmohr Street, Dexter, Montana, 660","about":"Quis commodo eu dolor incididunt. Nisi magna mollit nostrud do consequat irure exercitation mollit aute deserunt. Magna aute quis occaecat incididunt deserunt tempor nostrud sint ullamco ipsum. Anim in occaecat exercitation laborum nostrud eiusmod reprehenderit ea culpa et sit. Culpa voluptate consectetur nostrud do eu fugiat excepteur officia pariatur enim duis amet.\r\n","registered":"2014-01-21T09:31:56 -01:00","latitude":-42.762739,"longitude":77.052349,"tags":["bug","new issue"]}
{"id":39,"isActive":false,"balance":"$1,722.85","picture":"http://placehold.it/32x32","age":29,"color":"brown","name":"Golden Horton","gender":"male","email":"goldenhorton@chorizon.com","phone":"+1 (903) 426-2489","address":"191 Schenck Avenue, Mayfair, North Dakota, 5000","about":"Cillum velit aliqua velit in quis do mollit in et veniam. Nostrud proident non irure commodo. Ea culpa duis enim adipisicing do sint et est culpa reprehenderit officia laborum. Non et nostrud tempor nostrud nostrud ea duis esse laboris occaecat laborum. In eu ipsum sit tempor esse eiusmod enim aliquip aute. Officia ea anim ea ea. Consequat aute deserunt tempor nulla nisi tempor velit.\r\n","registered":"2015-08-19T02:56:41 -02:00","latitude":69.922534,"longitude":9.881433,"tags":["bug"]}
{"id":40,"isActive":false,"balance":"$1,656.54","picture":"http://placehold.it/32x32","age":21,"color":"blue","name":"Stafford Emerson","gender":"male","email":"staffordemerson@chorizon.com","phone":"+1 (992) 455-2573","address":"523 Thornton Street, Conway, Vermont, 6331","about":"Adipisicing cupidatat elit minim elit nostrud elit non eiusmod sunt ut. Enim minim irure officia irure occaecat mollit eu nostrud eiusmod adipisicing sunt. Elit deserunt commodo minim dolor qui. Nostrud officia ex proident mollit et dolor tempor pariatur. Ex consequat tempor eiusmod irure mollit cillum laboris est veniam ea mollit deserunt. Tempor sit voluptate excepteur elit ullamco.\r\n","registered":"2019-02-16T04:07:08 -01:00","latitude":-29.143111,"longitude":-57.207703,"tags":["wontfix","good first issue","good first issue"]}
|
||||
{"id":41,"isActive":false,"balance":"$1,861.56","picture":"http://placehold.it/32x32","age":21,"color":"brown","name":"Salinas Gamble","gender":"male","email":"salinasgamble@chorizon.com","phone":"+1 (901) 525-2373","address":"991 Nostrand Avenue, Kansas, Mississippi, 6756","about":"Consequat tempor adipisicing cupidatat aliquip. Mollit proident incididunt ad ipsum laborum. Dolor in elit minim aliquip aliquip voluptate reprehenderit mollit eiusmod excepteur aliquip minim nulla cupidatat.\r\n","registered":"2017-08-21T05:47:53 -02:00","latitude":-22.593819,"longitude":-63.613004,"tags":["good first issue","bug","bug","wontfix"]}
|
||||
{"id":42,"isActive":true,"balance":"$3,179.74","picture":"http://placehold.it/32x32","age":34,"color":"brown","name":"Graciela Russell","gender":"female","email":"gracielarussell@chorizon.com","phone":"+1 (893) 464-3951","address":"361 Greenpoint Avenue, Shrewsbury, New Jersey, 4713","about":"Ex amet duis incididunt consequat minim dolore deserunt reprehenderit adipisicing in mollit aliqua adipisicing sunt. In ullamco eu qui est eiusmod qui. Fugiat esse est Lorem dolore nisi mollit exercitation. Aliquip occaecat esse exercitation ex non aute velit excepteur duis aliquip id. Velit id non aliquip fugiat minim qui exercitation culpa tempor consectetur. Minim dolor labore ea aute aute eu.\r\n","registered":"2015-05-18T09:52:56 -02:00","latitude":-14.634444,"longitude":12.931783,"tags":["wontfix","bug","wontfix"]}
|
||||
{"id":43,"isActive":true,"balance":"$1,777.38","picture":"http://placehold.it/32x32","age":25,"color":"blue","name":"Arnold Bender","gender":"male","email":"arnoldbender@chorizon.com","phone":"+1 (945) 581-3808","address":"781 Lorraine Street, Gallina, American Samoa, 1832","about":"Et mollit laboris duis ut duis eiusmod aute laborum duis irure labore deserunt. Ut occaecat ullamco quis excepteur. Et commodo non sint laboris tempor laboris aliqua consequat magna ea aute minim tempor pariatur. Dolore occaecat qui irure Lorem nulla consequat non.\r\n","registered":"2018-12-23T02:26:30 -01:00","latitude":41.208579,"longitude":51.948925,"tags":["bug","good first issue","good first issue","wontfix"]}
|
||||
{"id":44,"isActive":true,"balance":"$2,893.45","picture":"http://placehold.it/32x32","age":22,"color":"Green","name":"Joni Spears","gender":"female","email":"jonispears@chorizon.com","phone":"+1 (916) 565-2124","address":"307 Harwood Place, Canterwood, Maryland, 2047","about":"Dolore consequat deserunt aliquip duis consequat minim occaecat enim est. Nulla aute reprehenderit est enim duis cillum ullamco aliquip eiusmod sunt. Labore eiusmod aliqua Lorem velit aliqua quis ex mollit mollit duis culpa et qui in. Cupidatat est id ullamco irure dolor nulla.\r\n","registered":"2015-03-01T12:38:28 -01:00","latitude":8.19071,"longitude":146.323808,"tags":["wontfix","new issue","good first issue","good first issue"]}
|
||||
{"id":45,"isActive":true,"balance":"$2,830.36","picture":"http://placehold.it/32x32","age":20,"color":"brown","name":"Irene Bennett","gender":"female","email":"irenebennett@chorizon.com","phone":"+1 (904) 431-2211","address":"353 Ridgecrest Terrace, Springdale, Marshall Islands, 2686","about":"Consectetur Lorem dolor reprehenderit sunt duis. Pariatur non velit velit veniam elit reprehenderit in. Aute quis Lorem quis pariatur Lorem incididunt nulla magna adipisicing. Et id occaecat labore officia occaecat occaecat adipisicing.\r\n","registered":"2018-04-17T05:18:51 -02:00","latitude":-36.435177,"longitude":-127.552573,"tags":["bug","wontfix"]}
|
||||
{"id":46,"isActive":true,"balance":"$1,348.04","picture":"http://placehold.it/32x32","age":34,"color":"Green","name":"Lawson Curtis","gender":"male","email":"lawsoncurtis@chorizon.com","phone":"+1 (896) 532-2172","address":"942 Gerritsen Avenue, Southmont, Kansas, 8915","about":"Amet consectetur minim aute nostrud excepteur sint labore in culpa. Mollit qui quis ea amet sint ex incididunt nulla. Elit id esse ea consectetur laborum consequat occaecat aute consectetur ex. Commodo duis aute elit occaecat cupidatat non consequat ad officia qui dolore nostrud reprehenderit. Occaecat velit velit adipisicing exercitation consectetur. Incididunt et amet nostrud tempor do esse ullamco est Lorem irure. Eu aliqua eu exercitation sint.\r\n","registered":"2016-08-23T01:41:09 -02:00","latitude":-48.783539,"longitude":20.492944,"tags":[]}
|
||||
{"id":47,"isActive":true,"balance":"$1,132.41","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Goff May","gender":"male","email":"goffmay@chorizon.com","phone":"+1 (859) 453-3415","address":"225 Rutledge Street, Boonville, Massachusetts, 4081","about":"Sint occaecat velit anim sint reprehenderit est. Adipisicing ea pariatur amet id non ex. Aute id laborum tempor aliquip magna ex eu incididunt aliquip eiusmod elit quis dolor. Anim est minim deserunt amet exercitation nulla elit nulla nulla culpa ullamco. Velit consectetur ipsum amet proident labore excepteur ut id excepteur voluptate commodo. Exercitation et laboris labore esse est laboris consectetur et sint.\r\n","registered":"2014-10-25T07:32:30 -02:00","latitude":13.079225,"longitude":76.215086,"tags":["bug"]}
|
||||
{"id":48,"isActive":true,"balance":"$1,201.87","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Goodman Becker","gender":"male","email":"goodmanbecker@chorizon.com","phone":"+1 (825) 470-3437","address":"388 Seigel Street, Sisquoc, Kentucky, 8231","about":"Velit excepteur aute esse fugiat laboris aliqua magna. Est ex sit do labore ullamco aliquip. Duis ea commodo nostrud in fugiat. Aliqua consequat mollit dolore excepteur nisi ullamco commodo ea nostrud ea minim. Minim occaecat ut laboris ea consectetur veniam ipsum qui sit tempor incididunt anim amet eu. Velit sint incididunt eu adipisicing ipsum qui labore. Anim commodo labore reprehenderit aliquip labore elit minim deserunt amet exercitation officia non ea consectetur.\r\n","registered":"2019-09-05T04:49:03 -02:00","latitude":-23.792094,"longitude":-13.621221,"tags":["bug","bug","wontfix","new issue"]}
|
||||
{"id":49,"isActive":true,"balance":"$1,476.39","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Maureen Dale","gender":"female","email":"maureendale@chorizon.com","phone":"+1 (984) 538-3684","address":"817 Newton Street, Bannock, Wyoming, 1468","about":"Tempor mollit exercitation excepteur cupidatat reprehenderit ad ex. Nulla laborum proident incididunt quis. Esse laborum deserunt qui anim. Sunt incididunt pariatur cillum anim proident eu ullamco dolor excepteur. Ullamco amet culpa nostrud adipisicing duis aliqua consequat duis non eu id mollit velit. Deserunt ullamco amet in occaecat.\r\n","registered":"2018-04-26T06:04:40 -02:00","latitude":-64.196802,"longitude":-117.396238,"tags":["wontfix"]}
|
||||
{"id":50,"isActive":true,"balance":"$1,947.08","picture":"http://placehold.it/32x32","age":21,"color":"Green","name":"Guerra Mcintyre","gender":"male","email":"guerramcintyre@chorizon.com","phone":"+1 (951) 536-2043","address":"423 Lombardy Street, Stewart, West Virginia, 908","about":"Sunt proident proident deserunt exercitation consectetur deserunt labore non commodo amet. Duis aute aliqua amet deserunt consectetur velit. Quis Lorem dolore occaecat deserunt reprehenderit non esse ullamco nostrud enim sunt ea fugiat. Elit amet veniam eu magna tempor. Mollit cupidatat laboris ex deserunt et labore sit tempor nostrud anim. Tempor aliqua occaecat voluptate reprehenderit eiusmod aliqua incididunt officia.\r\n","registered":"2015-07-16T05:11:42 -02:00","latitude":79.733743,"longitude":-20.602356,"tags":["bug","good first issue","good first issue"]}
|
||||
{"id":51,"isActive":true,"balance":"$2,960.90","picture":"http://placehold.it/32x32","age":23,"color":"blue","name":"Key Cervantes","gender":"male","email":"keycervantes@chorizon.com","phone":"+1 (931) 474-3865","address":"410 Barbey Street, Vernon, Oregon, 2328","about":"Duis amet minim eu consectetur laborum ad exercitation eiusmod nulla velit cillum consectetur. Nostrud aliqua cillum minim veniam quis do cupidatat mollit laborum. Culpa fugiat consectetur cillum non occaecat tempor non fugiat esse pariatur in ullamco. Occaecat amet officia et culpa officia deserunt in qui magna aute consequat eiusmod.\r\n","registered":"2019-12-15T12:13:35 -01:00","latitude":47.627647,"longitude":117.049918,"tags":["new issue"]}
|
||||
{"id":52,"isActive":false,"balance":"$1,884.02","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Karen Nelson","gender":"female","email":"karennelson@chorizon.com","phone":"+1 (993) 528-3607","address":"930 Frank Court, Dunbar, New York, 8810","about":"Occaecat officia veniam consectetur aliqua laboris dolor irure nulla. Lorem ipsum sit nisi veniam mollit ea sint nisi irure. Eiusmod officia do laboris nostrud enim ullamco nulla officia in Lorem qui. Sint sunt incididunt quis reprehenderit incididunt. Sit dolore nulla consequat ea magna.\r\n","registered":"2014-06-23T09:21:44 -02:00","latitude":-59.059033,"longitude":76.565373,"tags":["new issue","bug"]}
|
||||
{"id":53,"isActive":true,"balance":"$3,559.55","picture":"http://placehold.it/32x32","age":32,"color":"brown","name":"Caitlin Burnett","gender":"female","email":"caitlinburnett@chorizon.com","phone":"+1 (945) 480-2796","address":"516 Senator Street, Emory, Iowa, 4145","about":"In aliqua ea esse in. Magna aute cupidatat culpa enim proident ad adipisicing laborum consequat exercitation nisi. Qui esse aliqua duis anim nulla esse enim nostrud ipsum tempor. Lorem deserunt ullamco do mollit culpa ipsum duis Lorem velit duis occaecat.\r\n","registered":"2019-01-09T02:26:31 -01:00","latitude":-82.774237,"longitude":42.316194,"tags":["bug","good first issue"]}
|
||||
{"id":54,"isActive":true,"balance":"$2,113.29","picture":"http://placehold.it/32x32","age":28,"color":"Green","name":"Richards Walls","gender":"male","email":"richardswalls@chorizon.com","phone":"+1 (865) 517-2982","address":"959 Brightwater Avenue, Stevens, Nevada, 2968","about":"Ad aute Lorem non pariatur anim ullamco ad amet eiusmod tempor velit. Mollit et tempor nisi aute adipisicing exercitation mollit do amet amet est fugiat enim. Ex voluptate nulla id tempor officia ullamco cillum dolor irure irure mollit et magna nisi. Pariatur voluptate qui laboris dolor id. Eu ipsum nulla dolore aute voluptate deserunt anim aliqua. Ut enim enim velit officia est nisi. Duis amet ut veniam aliquip minim tempor Lorem amet Lorem dolor duis.\r\n","registered":"2014-09-25T06:51:22 -02:00","latitude":80.09202,"longitude":87.49759,"tags":["wontfix","wontfix","bug"]}
|
||||
{"id":55,"isActive":true,"balance":"$1,977.66","picture":"http://placehold.it/32x32","age":36,"color":"brown","name":"Combs Stanley","gender":"male","email":"combsstanley@chorizon.com","phone":"+1 (827) 419-2053","address":"153 Beverley Road, Siglerville, South Carolina, 3666","about":"Commodo ullamco consequat eu ipsum eiusmod aute voluptate in. Ea laboris id deserunt nostrud pariatur et laboris minim tempor quis qui consequat non esse. Magna elit commodo mollit veniam Lorem enim nisi pariatur. Nisi non nisi adipisicing ea ipsum laborum dolore cillum. Amet do nisi esse laboris ipsum proident non veniam ullamco ea cupidatat sunt. Aliquip aute cillum quis laboris consectetur enim eiusmod nisi non id ullamco cupidatat sunt.\r\n","registered":"2019-08-22T07:53:15 -02:00","latitude":78.386181,"longitude":143.661058,"tags":[]}
|
||||
{"id":56,"isActive":false,"balance":"$3,886.12","picture":"http://placehold.it/32x32","age":23,"color":"brown","name":"Tucker Barry","gender":"male","email":"tuckerbarry@chorizon.com","phone":"+1 (808) 544-3433","address":"805 Jamaica Avenue, Cornfields, Minnesota, 3689","about":"Enim est sunt ullamco nulla aliqua commodo. Enim minim veniam non fugiat id tempor ad velit quis velit ad sunt consectetur laborum. Cillum deserunt tempor est adipisicing Lorem esse qui. Magna quis sunt cillum ea officia adipisicing eiusmod eu et nisi consectetur.\r\n","registered":"2016-08-29T07:28:00 -02:00","latitude":71.701551,"longitude":9.903068,"tags":[]}
|
||||
{"id":57,"isActive":false,"balance":"$1,844.56","picture":"http://placehold.it/32x32","age":20,"color":"Green","name":"Kaitlin Conner","gender":"female","email":"kaitlinconner@chorizon.com","phone":"+1 (862) 467-2666","address":"501 Knight Court, Joppa, Rhode Island, 274","about":"Occaecat id reprehenderit pariatur ea. Incididunt laborum reprehenderit ipsum velit labore excepteur nostrud voluptate officia ut culpa. Sint sunt in qui duis cillum aliqua do ullamco. Non do aute excepteur non labore sint consectetur tempor ad ea fugiat commodo labore. Dolor tempor culpa Lorem voluptate esse nostrud anim tempor irure reprehenderit. Deserunt ipsum cillum fugiat ut labore labore anim. In aliqua sunt dolore irure reprehenderit voluptate commodo consequat mollit amet laboris sit anim.\r\n","registered":"2019-05-30T06:38:24 -02:00","latitude":15.613464,"longitude":171.965629,"tags":[]}
|
||||
{"id":58,"isActive":true,"balance":"$2,876.10","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Mamie Fischer","gender":"female","email":"mamiefischer@chorizon.com","phone":"+1 (948) 545-3901","address":"599 Hunterfly Place, Haena, Georgia, 6005","about":"Cillum eu aliquip ipsum anim in dolore labore ea. Laboris velit esse ea ea aute do adipisicing ullamco elit laborum aute tempor. Esse consectetur quis irure occaecat nisi cillum et consectetur cillum cillum quis quis commodo.\r\n","registered":"2019-05-27T05:07:10 -02:00","latitude":70.915079,"longitude":-48.813584,"tags":["bug","wontfix","wontfix","good first issue"]}
|
||||
{"id":59,"isActive":true,"balance":"$1,921.58","picture":"http://placehold.it/32x32","age":31,"color":"Green","name":"Harper Carson","gender":"male","email":"harpercarson@chorizon.com","phone":"+1 (912) 430-3243","address":"883 Dennett Place, Knowlton, New Mexico, 9219","about":"Exercitation minim esse proident cillum velit et deserunt incididunt adipisicing minim. Cillum Lorem consectetur laborum id consequat exercitation velit. Magna dolor excepteur sunt deserunt dolor ullamco non sint proident ipsum. Reprehenderit voluptate sit veniam consectetur ea sunt duis labore deserunt ipsum aute. Eiusmod aliqua anim voluptate id duis tempor aliqua commodo sunt. Do officia ea consectetur nostrud eiusmod laborum.\r\n","registered":"2019-12-07T07:33:15 -01:00","latitude":-60.812605,"longitude":-27.129016,"tags":["bug","new issue"]}
|
||||
{"id":60,"isActive":true,"balance":"$1,770.93","picture":"http://placehold.it/32x32","age":23,"color":"brown","name":"Jody Herrera","gender":"female","email":"jodyherrera@chorizon.com","phone":"+1 (890) 583-3222","address":"261 Jay Street, Strykersville, Ohio, 9248","about":"Sit adipisicing pariatur irure non sint cupidatat ex ipsum pariatur exercitation ea. Enim consequat enim eu eu sint eu elit ex esse aliquip. Pariatur ipsum dolore veniam nisi id tempor elit exercitation dolore ad fugiat labore velit.\r\n","registered":"2016-05-21T01:00:02 -02:00","latitude":-36.846586,"longitude":131.156223,"tags":[]}
|
||||
{"id":61,"isActive":false,"balance":"$2,813.41","picture":"http://placehold.it/32x32","age":37,"color":"Green","name":"Charles Castillo","gender":"male","email":"charlescastillo@chorizon.com","phone":"+1 (934) 467-2108","address":"675 Morton Street, Rew, Pennsylvania, 137","about":"Velit amet laborum amet sunt sint sit cupidatat deserunt dolor laborum consectetur veniam. Minim cupidatat amet exercitation nostrud ex deserunt ad Lorem amet aute consectetur labore reprehenderit. Minim mollit aliqua et deserunt ex nisi. Id irure dolor labore consequat ipsum consectetur.\r\n","registered":"2019-06-10T02:54:22 -02:00","latitude":-16.423202,"longitude":-146.293752,"tags":["new issue","new issue"]}
|
||||
{"id":62,"isActive":true,"balance":"$3,341.35","picture":"http://placehold.it/32x32","age":33,"color":"blue","name":"Estelle Ramirez","gender":"female","email":"estelleramirez@chorizon.com","phone":"+1 (816) 459-2073","address":"636 Nolans Lane, Camptown, California, 7794","about":"Dolor proident incididunt ex labore quis ullamco duis. Sit esse laboris nisi eu voluptate nulla cupidatat nulla fugiat veniam. Culpa cillum est esse dolor consequat. Pariatur ex sit irure qui do fugiat. Fugiat culpa veniam est nisi excepteur quis cupidatat et minim in esse minim dolor et. Anim aliquip labore dolor occaecat nisi sunt dolore pariatur veniam nostrud est ut.\r\n","registered":"2015-02-14T01:05:50 -01:00","latitude":-46.591249,"longitude":-83.385587,"tags":["good first issue","bug"]}
|
||||
{"id":63,"isActive":true,"balance":"$2,478.30","picture":"http://placehold.it/32x32","age":21,"color":"blue","name":"Knowles Hebert","gender":"male","email":"knowleshebert@chorizon.com","phone":"+1 (819) 409-2308","address":"361 Kathleen Court, Gratton, Connecticut, 7254","about":"Esse mollit nulla eiusmod esse duis non proident excepteur labore. Nisi ex culpa do mollit dolor ea deserunt elit anim ipsum nostrud. Cupidatat nostrud duis ipsum dolore amet et. Veniam in cillum ea cillum deserunt excepteur officia laboris nulla. Commodo incididunt aliquip qui sunt dolore occaecat labore do laborum irure. Labore culpa duis pariatur reprehenderit ad laboris occaecat anim cillum et fugiat ea.\r\n","registered":"2016-03-08T08:34:52 -01:00","latitude":71.042482,"longitude":152.460406,"tags":["good first issue","wontfix"]}
|
||||
{"id":64,"isActive":false,"balance":"$2,559.09","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Thelma Mckenzie","gender":"female","email":"thelmamckenzie@chorizon.com","phone":"+1 (941) 596-2777","address":"202 Leonard Street, Riverton, Illinois, 8577","about":"Non ad ipsum elit commodo fugiat Lorem ipsum reprehenderit. Commodo incididunt officia cillum eiusmod officia proident ea incididunt ullamco magna commodo consectetur dolor. Nostrud esse nisi ea laboris. Veniam et dolore nulla excepteur pariatur laborum non. Eiusmod reprehenderit do tempor esse eu eu aliquip. Magna quis consectetur ipsum adipisicing mollit elit ad elit.\r\n","registered":"2020-04-14T12:43:06 -02:00","latitude":16.026129,"longitude":105.464476,"tags":[]}
|
||||
{"id":65,"isActive":true,"balance":"$1,025.08","picture":"http://placehold.it/32x32","age":34,"color":"blue","name":"Carole Rowland","gender":"female","email":"carolerowland@chorizon.com","phone":"+1 (862) 558-3448","address":"941 Melba Court, Bluetown, Florida, 9555","about":"Ullamco occaecat ipsum aliqua sit proident eu. Occaecat ut consectetur proident culpa aliqua excepteur quis qui anim irure sit proident mollit irure. Proident cupidatat deserunt dolor adipisicing.\r\n","registered":"2014-12-01T05:55:35 -01:00","latitude":-0.191998,"longitude":43.389652,"tags":["wontfix"]}
|
||||
{"id":66,"isActive":true,"balance":"$1,061.49","picture":"http://placehold.it/32x32","age":35,"color":"brown","name":"Higgins Aguilar","gender":"male","email":"higginsaguilar@chorizon.com","phone":"+1 (911) 540-3791","address":"132 Sackman Street, Layhill, Guam, 8729","about":"Anim ea dolore exercitation minim. Proident cillum non deserunt cupidatat veniam non occaecat aute ullamco irure velit laboris ex aliquip. Voluptate incididunt non ex nulla est ipsum. Amet anim do velit sunt irure sint minim nisi occaecat proident tempor elit exercitation nostrud.\r\n","registered":"2015-04-05T02:10:07 -02:00","latitude":74.702813,"longitude":151.314972,"tags":["bug"]}
|
||||
{"id":67,"isActive":true,"balance":"$3,510.14","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Ilene Gillespie","gender":"female","email":"ilenegillespie@chorizon.com","phone":"+1 (937) 575-2676","address":"835 Lake Street, Naomi, Alabama, 4131","about":"Quis laborum consequat id cupidatat exercitation aute ad ex nulla dolore velit qui proident minim. Et do consequat nisi eiusmod exercitation exercitation enim voluptate elit ullamco. Cupidatat ut adipisicing consequat aute est voluptate sit ipsum culpa ullamco. Ex pariatur ex qui quis qui.\r\n","registered":"2015-06-28T09:41:45 -02:00","latitude":71.573342,"longitude":-95.295989,"tags":["wontfix","wontfix"]}
|
||||
{"id":68,"isActive":false,"balance":"$1,539.98","picture":"http://placehold.it/32x32","age":24,"color":"Green","name":"Angelina Dyer","gender":"female","email":"angelinadyer@chorizon.com","phone":"+1 (948) 574-3949","address":"575 Division Place, Gorham, Louisiana, 3458","about":"Cillum magna eu est veniam incididunt laboris laborum elit mollit incididunt proident non mollit. Dolor mollit culpa ullamco dolore aliqua adipisicing culpa officia. Reprehenderit minim nisi fugiat consectetur dolore.\r\n","registered":"2014-07-08T06:34:36 -02:00","latitude":-85.649593,"longitude":66.126018,"tags":["good first issue"]}
|
||||
{"id":69,"isActive":true,"balance":"$3,367.69","picture":"http://placehold.it/32x32","age":30,"color":"brown","name":"Marks Burt","gender":"male","email":"marksburt@chorizon.com","phone":"+1 (895) 497-3138","address":"819 Village Road, Wadsworth, Delaware, 6099","about":"Fugiat tempor aute voluptate proident exercitation tempor esse dolor id. Duis aliquip exercitation Lorem elit magna sint sit. Culpa adipisicing occaecat aliqua officia reprehenderit laboris sint aliquip. Magna do sunt consequat excepteur nisi do commodo non. Cillum officia nostrud consequat excepteur elit proident in. Tempor ipsum in ut qui cupidatat exercitation est nulla exercitation voluptate.\r\n","registered":"2014-08-31T06:12:18 -02:00","latitude":26.854112,"longitude":-143.313948,"tags":["good first issue"]}
|
||||
{"id":70,"isActive":false,"balance":"$3,755.72","picture":"http://placehold.it/32x32","age":23,"color":"blue","name":"Glass Perkins","gender":"male","email":"glassperkins@chorizon.com","phone":"+1 (923) 486-3725","address":"899 Roosevelt Court, Belleview, Idaho, 1737","about":"Esse magna id labore sunt qui eu enim esse cillum consequat enim eu culpa enim. Duis veniam cupidatat deserunt sunt irure ad Lorem proident aliqua mollit. Laborum mollit aute nulla est. Sunt id proident incididunt ipsum et dolor consectetur laborum enim dolor officia dolore laborum. Est commodo duis et ea consequat labore id id eu aliqua. Qui veniam sit eu aliquip ad sit dolor ullamco et laborum voluptate quis fugiat ex. Exercitation dolore cillum amet ad nisi consectetur occaecat sit aliqua laborum qui proident aliqua exercitation.\r\n","registered":"2015-05-22T05:44:33 -02:00","latitude":54.27147,"longitude":-65.065604,"tags":["wontfix"]}
|
||||
{"id":71,"isActive":true,"balance":"$3,381.63","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Candace Sawyer","gender":"female","email":"candacesawyer@chorizon.com","phone":"+1 (830) 404-2636","address":"334 Arkansas Drive, Bordelonville, Tennessee, 8449","about":"Et aliqua elit incididunt et aliqua. Deserunt ut elit proident ullamco ut. Ex exercitation amet non eu reprehenderit ea voluptate qui sit reprehenderit ad sint excepteur.\r\n","registered":"2014-04-04T08:45:00 -02:00","latitude":6.484262,"longitude":-37.054928,"tags":["new issue","new issue"]}
|
||||
{"id":72,"isActive":true,"balance":"$1,640.98","picture":"http://placehold.it/32x32","age":27,"color":"Green","name":"Hendricks Martinez","gender":"male","email":"hendricksmartinez@chorizon.com","phone":"+1 (857) 566-3245","address":"636 Agate Court, Newry, Utah, 3304","about":"Do sit culpa amet incididunt officia enim occaecat incididunt excepteur enim tempor deserunt qui. Excepteur adipisicing anim consectetur adipisicing proident anim laborum qui. Aliquip nostrud cupidatat sit ullamco.\r\n","registered":"2018-06-15T10:36:11 -02:00","latitude":86.746034,"longitude":10.347893,"tags":["new issue"]}
|
||||
{"id":73,"isActive":false,"balance":"$1,239.74","picture":"http://placehold.it/32x32","age":38,"color":"blue","name":"Eleanor Shepherd","gender":"female","email":"eleanorshepherd@chorizon.com","phone":"+1 (894) 567-2617","address":"670 Lafayette Walk, Darlington, Palau, 8803","about":"Adipisicing ad incididunt id veniam magna cupidatat et labore eu deserunt mollit. Lorem voluptate exercitation elit eu aliquip cupidatat occaecat anim excepteur reprehenderit est est. Ipsum excepteur ea mollit qui nisi laboris ex qui. Cillum velit culpa culpa commodo laboris nisi Lorem non elit deserunt incididunt. Officia quis velit nulla sint incididunt duis mollit tempor adipisicing qui officia eu nisi Lorem. Do proident pariatur ex enim nostrud eu aute esse deserunt eu velit quis culpa exercitation. Occaecat ad cupidatat ullamco consequat duis anim deserunt occaecat aliqua sunt consectetur ipsum magna.\r\n","registered":"2020-02-29T12:15:28 -01:00","latitude":35.749621,"longitude":-94.40842,"tags":["good first issue","new issue","new issue","bug"]}
|
||||
{"id":74,"isActive":true,"balance":"$1,180.90","picture":"http://placehold.it/32x32","age":36,"color":"Green","name":"Stark Wong","gender":"male","email":"starkwong@chorizon.com","phone":"+1 (805) 575-3055","address":"522 Bond Street, Bawcomville, Wisconsin, 324","about":"Aute qui sit incididunt eu adipisicing exercitation sunt nostrud. Id laborum incididunt proident ipsum est cillum esse. Officia ullamco eu ut Lorem do minim ea dolor consequat sit eu est voluptate. Id commodo cillum enim culpa aliquip ullamco nisi Lorem cillum ipsum cupidatat anim officia eu. Dolore sint elit labore pariatur. Officia duis nulla voluptate et nulla ut voluptate laboris eu commodo veniam qui veniam.\r\n","registered":"2020-01-25T10:47:48 -01:00","latitude":-80.452139,"longitude":160.72546,"tags":["wontfix"]}
|
||||
{"id":75,"isActive":false,"balance":"$1,913.42","picture":"http://placehold.it/32x32","age":24,"color":"Green","name":"Emma Jacobs","gender":"female","email":"emmajacobs@chorizon.com","phone":"+1 (899) 554-3847","address":"173 Tapscott Street, Esmont, Maine, 7450","about":"Laboris consequat consectetur tempor labore ullamco ullamco voluptate quis quis duis ut ad. In est irure quis amet sunt nulla ad ut sit labore ut eu quis duis. Nostrud cupidatat aliqua sunt occaecat minim id consequat officia deserunt laborum. Ea dolor reprehenderit laborum veniam exercitation est nostrud excepteur laborum minim id qui et.\r\n","registered":"2019-03-29T06:24:13 -01:00","latitude":-35.53722,"longitude":155.703874,"tags":[]}
|
||||
{"id":76,"isActive":false,"balance":"$1,274.29","picture":"http://placehold.it/32x32","age":25,"color":"Green","name":"Clarice Gardner","gender":"female","email":"claricegardner@chorizon.com","phone":"+1 (810) 407-3258","address":"894 Brooklyn Road, Utting, New Hampshire, 6404","about":"Elit occaecat aute ea adipisicing mollit cupidatat aliquip excepteur veniam minim. Sunt quis dolore in commodo aute esse quis. Lorem in cillum commodo eu anim commodo mollit. Adipisicing enim sunt adipisicing cupidatat adipisicing eiusmod eu do sit nisi.\r\n","registered":"2014-10-20T10:13:32 -02:00","latitude":17.11935,"longitude":65.38197,"tags":["new issue","wontfix"]}
|
59
meilisearch-http/tests/assets/dumps/v1/test/settings.json
Normal file
@ -0,0 +1,59 @@
|
||||
{
|
||||
"rankingRules": [
|
||||
"typo",
|
||||
"words",
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"exactness"
|
||||
],
|
||||
"distinctAttribute": "email",
|
||||
"searchableAttributes": [
|
||||
"balance",
|
||||
"picture",
|
||||
"age",
|
||||
"color",
|
||||
"name",
|
||||
"gender",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"about",
|
||||
"registered",
|
||||
"latitude",
|
||||
"longitude",
|
||||
"tags"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"id",
|
||||
"isActive",
|
||||
"balance",
|
||||
"picture",
|
||||
"age",
|
||||
"color",
|
||||
"name",
|
||||
"gender",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"about",
|
||||
"registered",
|
||||
"latitude",
|
||||
"longitude",
|
||||
"tags"
|
||||
],
|
||||
"stopWords": [
|
||||
"in",
|
||||
"ad"
|
||||
],
|
||||
"synonyms": {
|
||||
"wolverine": ["xmen", "logan"],
|
||||
"logan": ["wolverine", "xmen"]
|
||||
},
|
||||
"attributesForFaceting": [
|
||||
"gender",
|
||||
"color",
|
||||
"tags",
|
||||
"isActive"
|
||||
]
|
||||
}
|
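The settings fixture above covers every section of the v1 dump settings format: ranking rules, distinct attribute, searchable and displayed attributes, stop words, synonyms, and attributes for faceting. As a minimal sketch (not taken from the original tests), such a fixture could be pushed through the settings route with the `Server` helper from `tests/common.rs` further down in this diff; the fixture path and the final assertion are illustrative assumptions.

```rust
// Minimal sketch, assuming a test file that declares `mod common;`.
// The fixture path and the asserted value are illustrative, not from the original tests.
use serde_json::Value;
use std::fs;

#[actix_rt::test]
async fn apply_settings_fixture() {
    let mut server = common::Server::test_server().await;

    // Load the settings fixture shown above.
    let raw = fs::read_to_string("tests/assets/dumps/v1/test/settings.json").unwrap();
    let settings: Value = serde_json::from_str(&raw).unwrap();

    // POST to /indexes/{uid}/settings and wait for the update to be processed.
    server.update_all_settings(settings).await;

    let (response, status_code) = server.get_all_settings().await;
    assert_eq!(status_code, 200);
    assert_eq!(response["distinctAttribute"], "email");
}
```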
@ -0,0 +1,2 @@
|
||||
{"status": "processed","updateId": 0,"type": {"name":"Settings","settings":{"ranking_rules":{"Update":["Typo","Words","Proximity","Attribute","WordsPosition","Exactness"]},"distinct_attribute":"Nothing","primary_key":"Nothing","searchable_attributes":{"Update":["balance","picture","age","color","name","gender","email","phone","address","about","registered","latitude","longitude","tags"]},"displayed_attributes":{"Update":["about","address","age","balance","color","email","gender","id","isActive","latitude","longitude","name","phone","picture","registered","tags"]},"stop_words":"Nothing","synonyms":"Nothing","attributes_for_faceting":"Nothing"}}}
|
||||
{"status": "processed", "updateId": 1, "type": { "name": "DocumentsAddition"}}
|
@ -5,7 +5,7 @@
|
||||
"balance": "$2,668.55",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 36,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Lucas Hess",
|
||||
"gender": "male",
|
||||
"email": "lucashess@chorizon.com",
|
||||
@ -26,7 +26,7 @@
|
||||
"balance": "$1,706.13",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 27,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
@ -90,7 +90,7 @@
|
||||
"balance": "$2,575.78",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 39,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Mariana Pacheco",
|
||||
"gender": "female",
|
||||
"email": "marianapacheco@chorizon.com",
|
||||
@ -110,7 +110,7 @@
|
||||
"balance": "$3,793.09",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 20,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Warren Watson",
|
||||
"gender": "male",
|
||||
"email": "warrenwatson@chorizon.com",
|
||||
@ -155,7 +155,7 @@
|
||||
"balance": "$1,349.50",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 28,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Chrystal Boyd",
|
||||
"gender": "female",
|
||||
"email": "chrystalboyd@chorizon.com",
|
||||
@ -235,7 +235,7 @@
|
||||
"balance": "$1,351.43",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 28,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Evans Wagner",
|
||||
"gender": "male",
|
||||
"email": "evanswagner@chorizon.com",
|
||||
@ -431,7 +431,7 @@
|
||||
"balance": "$1,986.48",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 38,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Florence Long",
|
||||
"gender": "female",
|
||||
"email": "florencelong@chorizon.com",
|
||||
@ -530,7 +530,7 @@
|
||||
"balance": "$3,973.43",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 29,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Sykes Conley",
|
||||
"gender": "male",
|
||||
"email": "sykesconley@chorizon.com",
|
||||
@ -813,7 +813,7 @@
|
||||
"balance": "$1,992.38",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 40,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Christina Short",
|
||||
"gender": "female",
|
||||
"email": "christinashort@chorizon.com",
|
||||
@ -944,7 +944,7 @@
|
||||
"balance": "$2,893.45",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 22,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Joni Spears",
|
||||
"gender": "female",
|
||||
"email": "jonispears@chorizon.com",
|
||||
@ -988,7 +988,7 @@
|
||||
"balance": "$1,348.04",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 34,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Lawson Curtis",
|
||||
"gender": "male",
|
||||
"email": "lawsoncurtis@chorizon.com",
|
||||
@ -1006,7 +1006,7 @@
|
||||
"balance": "$1,132.41",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 38,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Goff May",
|
||||
"gender": "male",
|
||||
"email": "goffmay@chorizon.com",
|
||||
@ -1026,7 +1026,7 @@
|
||||
"balance": "$1,201.87",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 38,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Goodman Becker",
|
||||
"gender": "male",
|
||||
"email": "goodmanbecker@chorizon.com",
|
||||
@ -1069,7 +1069,7 @@
|
||||
"balance": "$1,947.08",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 21,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Guerra Mcintyre",
|
||||
"gender": "male",
|
||||
"email": "guerramcintyre@chorizon.com",
|
||||
@ -1153,7 +1153,7 @@
|
||||
"balance": "$2,113.29",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 28,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Richards Walls",
|
||||
"gender": "male",
|
||||
"email": "richardswalls@chorizon.com",
|
||||
@ -1211,7 +1211,7 @@
|
||||
"balance": "$1,844.56",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 20,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Kaitlin Conner",
|
||||
"gender": "female",
|
||||
"email": "kaitlinconner@chorizon.com",
|
||||
@ -1229,7 +1229,7 @@
|
||||
"balance": "$2,876.10",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 38,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Mamie Fischer",
|
||||
"gender": "female",
|
||||
"email": "mamiefischer@chorizon.com",
|
||||
@ -1252,7 +1252,7 @@
|
||||
"balance": "$1,921.58",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 31,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
@ -1291,7 +1291,7 @@
|
||||
"balance": "$2,813.41",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 37,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Charles Castillo",
|
||||
"gender": "male",
|
||||
"email": "charlescastillo@chorizon.com",
|
||||
@ -1433,7 +1433,7 @@
|
||||
"balance": "$1,539.98",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 24,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Angelina Dyer",
|
||||
"gender": "female",
|
||||
"email": "angelinadyer@chorizon.com",
|
||||
@ -1493,7 +1493,7 @@
|
||||
"balance": "$3,381.63",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 38,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Candace Sawyer",
|
||||
"gender": "female",
|
||||
"email": "candacesawyer@chorizon.com",
|
||||
@ -1514,7 +1514,7 @@
|
||||
"balance": "$1,640.98",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 27,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Hendricks Martinez",
|
||||
"gender": "male",
|
||||
"email": "hendricksmartinez@chorizon.com",
|
||||
@ -1557,7 +1557,7 @@
|
||||
"balance": "$1,180.90",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 36,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Stark Wong",
|
||||
"gender": "male",
|
||||
"email": "starkwong@chorizon.com",
|
||||
@ -1577,7 +1577,7 @@
|
||||
"balance": "$1,913.42",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 24,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Emma Jacobs",
|
||||
"gender": "female",
|
||||
"email": "emmajacobs@chorizon.com",
|
||||
@ -1595,7 +1595,7 @@
|
||||
"balance": "$1,274.29",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 25,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Clarice Gardner",
|
||||
"gender": "female",
|
||||
"email": "claricegardner@chorizon.com",
|
||||
|
@ -15,21 +15,30 @@ use meilisearch_http::option::Opt;
|
||||
#[macro_export]
|
||||
macro_rules! test_post_get_search {
|
||||
($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
|
||||
let post_query: meilisearch_http::routes::search::SearchQueryPost = serde_json::from_str(&$query.clone().to_string()).unwrap();
|
||||
let post_query: meilisearch_http::routes::search::SearchQueryPost =
|
||||
serde_json::from_str(&$query.clone().to_string()).unwrap();
|
||||
let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
|
||||
let get_query = ::serde_url_params::to_string(&get_query).unwrap();
|
||||
let ($response, $status_code) = $server.search_get(&get_query).await;
|
||||
let _ =::std::panic::catch_unwind(|| $block)
|
||||
.map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
|
||||
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
|
||||
panic!(
|
||||
"panic in get route: {:?}",
|
||||
e.downcast_ref::<&str>().unwrap()
|
||||
)
|
||||
});
|
||||
let ($response, $status_code) = $server.search_post($query).await;
|
||||
let _ = ::std::panic::catch_unwind(|| $block)
|
||||
.map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
|
||||
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
|
||||
panic!(
|
||||
"panic in post route: {:?}",
|
||||
e.downcast_ref::<&str>().unwrap()
|
||||
)
|
||||
});
|
||||
};
|
||||
}
|
||||
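The `test_post_get_search!` macro above serializes the JSON body into query-string parameters and evaluates the same assertion block against both the GET and the POST search routes, so a regression in either route fails the test. A hypothetical usage sketch follows; the query, the limit, and the assertions are illustrative, and it assumes `use serde_json::json;` plus the macro being in scope via `#[macro_use] mod common;`.

```rust
// Illustrative usage of the test_post_get_search! macro defined above.
#[actix_rt::test]
async fn search_returns_same_results_on_get_and_post() {
    let mut server = common::Server::test_server().await;

    let query = json!({ "q": "exercitation", "limit": 3 });
    test_post_get_search!(server, query, |response, status_code| {
        // This block runs once for the GET response and once for the POST response.
        assert_eq!(status_code, 200);
        assert!(response["hits"].as_array().unwrap().len() <= 3);
    });
}
```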
|
||||
pub struct Server {
|
||||
uid: String,
|
||||
data: Data,
|
||||
pub uid: String,
|
||||
pub data: Data,
|
||||
}
|
||||
|
||||
impl Server {
|
||||
@ -39,18 +48,20 @@ impl Server {
|
||||
let default_db_options = DatabaseOptions::default();
|
||||
|
||||
let opt = Opt {
|
||||
db_path: tmp_dir.path().to_str().unwrap().to_string(),
|
||||
db_path: tmp_dir.path().join("db").to_str().unwrap().to_string(),
|
||||
dumps_dir: tmp_dir.path().join("dump"),
|
||||
dump_batch_size: 16,
|
||||
http_addr: "127.0.0.1:7700".to_owned(),
|
||||
master_key: None,
|
||||
env: "development".to_owned(),
|
||||
no_analytics: true,
|
||||
main_map_size: default_db_options.main_map_size,
|
||||
update_map_size: default_db_options.update_map_size,
|
||||
max_mdb_size: default_db_options.main_map_size,
|
||||
max_udb_size: default_db_options.update_map_size,
|
||||
http_payload_size_limit: 10000000,
|
||||
..Opt::default()
|
||||
};
|
||||
|
||||
let data = Data::new(opt.clone()).unwrap();
|
||||
let data = Data::new(opt).unwrap();
|
||||
|
||||
Server {
|
||||
uid: uid.to_string(),
|
||||
@ -59,7 +70,6 @@ impl Server {
|
||||
}
|
||||
|
||||
pub async fn test_server() -> Self {
|
||||
|
||||
let mut server = Self::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
@ -124,6 +134,9 @@ impl Server {
|
||||
server
|
||||
}
|
||||
|
||||
pub fn data(&self) -> &Data {
|
||||
&self.data
|
||||
}
|
||||
|
||||
pub async fn wait_update_id(&mut self, update_id: u64) {
|
||||
// try to get the status up to 10 times, or panic so the test does not wait forever
|
||||
@ -132,7 +145,7 @@ impl Server {
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
if response["status"] == "processed" || response["status"] == "failed" {
|
||||
eprintln!("{:#?}", response);
|
||||
// eprintln!("{:#?}", response);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -146,28 +159,30 @@ impl Server {
|
||||
pub async fn get_request(&mut self, url: &str) -> (Value, StatusCode) {
|
||||
eprintln!("get_request: {}", url);
|
||||
|
||||
let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
|
||||
let mut app =
|
||||
test::init_service(meilisearch_http::create_app(&self.data, true).wrap(NormalizePath)).await;
|
||||
|
||||
let req = test::TestRequest::get().uri(url).to_request();
|
||||
let res = test::call_service(&mut app, req).await;
|
||||
let status_code = res.status().clone();
|
||||
let status_code = res.status();
|
||||
|
||||
let body = test::read_body(res).await;
|
||||
let response = serde_json::from_slice(&body).unwrap_or_default();
|
||||
(response, status_code)
|
||||
}
|
||||
|
||||
pub async fn post_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
|
||||
pub async fn post_request(&self, url: &str, body: Value) -> (Value, StatusCode) {
|
||||
eprintln!("post_request: {}", url);
|
||||
|
||||
let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
|
||||
let mut app =
|
||||
test::init_service(meilisearch_http::create_app(&self.data, true).wrap(NormalizePath)).await;
|
||||
|
||||
let req = test::TestRequest::post()
|
||||
.uri(url)
|
||||
.set_json(&body)
|
||||
.to_request();
|
||||
let res = test::call_service(&mut app, req).await;
|
||||
let status_code = res.status().clone();
|
||||
let status_code = res.status();
|
||||
|
||||
let body = test::read_body(res).await;
|
||||
let response = serde_json::from_slice(&body).unwrap_or_default();
|
||||
@ -179,7 +194,6 @@ impl Server {
|
||||
|
||||
let (response, status_code) = self.post_request(url, body).await;
|
||||
eprintln!("response: {}", response);
|
||||
assert_eq!(status_code, 202);
|
||||
assert!(response["updateId"].as_u64().is_some());
|
||||
self.wait_update_id(response["updateId"].as_u64().unwrap())
|
||||
.await;
|
||||
@ -189,14 +203,15 @@ impl Server {
|
||||
pub async fn put_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
|
||||
eprintln!("put_request: {}", url);
|
||||
|
||||
let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
|
||||
let mut app =
|
||||
test::init_service(meilisearch_http::create_app(&self.data, true).wrap(NormalizePath)).await;
|
||||
|
||||
let req = test::TestRequest::put()
|
||||
.uri(url)
|
||||
.set_json(&body)
|
||||
.to_request();
|
||||
let res = test::call_service(&mut app, req).await;
|
||||
let status_code = res.status().clone();
|
||||
let status_code = res.status();
|
||||
|
||||
let body = test::read_body(res).await;
|
||||
let response = serde_json::from_slice(&body).unwrap_or_default();
|
||||
@ -217,11 +232,12 @@ impl Server {
|
||||
pub async fn delete_request(&mut self, url: &str) -> (Value, StatusCode) {
|
||||
eprintln!("delete_request: {}", url);
|
||||
|
||||
let mut app = test::init_service(meilisearch_http::create_app(&self.data).wrap(NormalizePath)).await;
|
||||
let mut app =
|
||||
test::init_service(meilisearch_http::create_app(&self.data, true).wrap(NormalizePath)).await;
|
||||
|
||||
let req = test::TestRequest::delete().uri(url).to_request();
|
||||
let res = test::call_service(&mut app, req).await;
|
||||
let status_code = res.status().clone();
|
||||
let status_code = res.status();
|
||||
|
||||
let body = test::read_body(res).await;
|
||||
let response = serde_json::from_slice(&body).unwrap_or_default();
|
||||
@ -335,9 +351,9 @@ impl Server {
|
||||
self.delete_request_async(&url).await
|
||||
}
|
||||
|
||||
pub async fn delete_multiple_documents(&mut self, body: Value) {
|
||||
pub async fn delete_multiple_documents(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/documents/delete-batch", self.uid);
|
||||
self.post_request_async(&url, body).await;
|
||||
self.post_request_async(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn get_all_settings(&mut self) -> (Value, StatusCode) {
|
||||
@ -350,6 +366,11 @@ impl Server {
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_all_settings_sync(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_all_settings(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
@ -385,6 +406,11 @@ impl Server {
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_distinct_attribute_sync(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_distinct_attribute(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
@ -405,6 +431,11 @@ impl Server {
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_searchable_attributes_sync(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_searchable_attributes(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
@ -420,11 +451,39 @@ impl Server {
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_displayed_attributes_sync(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_displayed_attributes(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
}
|
||||
|
||||
pub async fn get_attributes_for_faceting(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
|
||||
self.get_request(&url).await
|
||||
}
|
||||
|
||||
pub async fn update_attributes_for_faceting(&mut self, body: Value) {
|
||||
let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_attributes_for_faceting_sync(
|
||||
&mut self,
|
||||
body: Value,
|
||||
) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_attributes_for_faceting(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/attributes-for-faceting", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
}
|
||||
|
||||
pub async fn get_synonyms(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/synonyms", self.uid);
|
||||
self.get_request(&url).await
|
||||
@ -435,6 +494,11 @@ impl Server {
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_synonyms_sync(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/synonyms", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_synonyms(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/synonyms", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
@ -450,6 +514,11 @@ impl Server {
|
||||
self.post_request_async(&url, body).await;
|
||||
}
|
||||
|
||||
pub async fn update_stop_words_sync(&mut self, body: Value) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/stop-words", self.uid);
|
||||
self.post_request(&url, body).await
|
||||
}
|
||||
|
||||
pub async fn delete_stop_words(&mut self) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/settings/stop-words", self.uid);
|
||||
self.delete_request_async(&url).await
|
||||
@ -483,4 +552,18 @@ impl Server {
|
||||
pub async fn get_sys_info_pretty(&mut self) -> (Value, StatusCode) {
|
||||
self.get_request("/sys-info/pretty").await
|
||||
}
|
||||
|
||||
pub async fn trigger_dump(&self) -> (Value, StatusCode) {
|
||||
self.post_request("/dumps", Value::Null).await
|
||||
}
|
||||
|
||||
pub async fn get_dump_status(&mut self, dump_uid: &str) -> (Value, StatusCode) {
|
||||
let url = format!("/dumps/{}/status", dump_uid);
|
||||
self.get_request(&url).await
|
||||
}
|
||||
|
||||
pub async fn trigger_dump_importation(&mut self, dump_uid: &str) -> (Value, StatusCode) {
|
||||
let url = format!("/dumps/{}/import", dump_uid);
|
||||
self.get_request(&url).await
|
||||
}
|
||||
}
|
||||
|
@ -192,7 +192,9 @@ async fn add_document_with_long_field() {
|
||||
"url":"/configuration/app/web.html#locations"
|
||||
}]);
|
||||
server.add_or_replace_multiple_documents(body).await;
|
||||
let (response, _status) = server.search_post(json!({ "q": "request_buffering" })).await;
|
||||
let (response, _status) = server
|
||||
.search_post(json!({ "q": "request_buffering" }))
|
||||
.await;
|
||||
assert!(!response["hits"].as_array().unwrap().is_empty());
|
||||
}
|
||||
|
||||
@ -213,5 +215,8 @@ async fn documents_with_same_id_are_overwritten() {
|
||||
server.add_or_replace_multiple_documents(documents).await;
|
||||
let (response, _status) = server.get_all_documents().await;
|
||||
assert_eq!(response.as_array().unwrap().len(), 1);
|
||||
assert_eq!(response.as_array().unwrap()[0].as_object().unwrap()["content"], "test2");
|
||||
assert_eq!(
|
||||
response.as_array().unwrap()[0].as_object().unwrap()["content"],
|
||||
"test2"
|
||||
);
|
||||
}
|
||||
|
@ -1,5 +1,7 @@
|
||||
mod common;
|
||||
|
||||
use serde_json::json;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn delete() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
@ -32,3 +34,34 @@ async fn delete_batch() {
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn text_clear_all_placeholder_search() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
server.create_index(body).await;
|
||||
let settings = json!({
|
||||
"attributesForFaceting": ["genre"],
|
||||
});
|
||||
|
||||
server.update_all_settings(settings).await;
|
||||
|
||||
let documents = json!([
|
||||
{ "id": 2, "title": "Pride and Prejudice", "author": "Jane Austin", "genre": "romance" },
|
||||
{ "id": 456, "title": "Le Petit Prince", "author": "Antoine de Saint-Exupéry", "genre": "adventure" },
|
||||
{ "id": 1, "title": "Alice In Wonderland", "author": "Lewis Carroll", "genre": "fantasy" },
|
||||
{ "id": 1344, "title": "The Hobbit", "author": "J. R. R. Tolkien", "genre": "fantasy" },
|
||||
{ "id": 4, "title": "Harry Potter and the Half-Blood Prince", "author": "J. K. Rowling", "genre": "fantasy" },
|
||||
{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams" }
|
||||
]);
|
||||
|
||||
server.add_or_update_multiple_documents(documents).await;
|
||||
server.clear_all_documents().await;
|
||||
let (response, _) = server.search_post(json!({ "q": "", "facetsDistribution": ["genre"] })).await;
|
||||
assert_eq!(response["nbHits"], 0);
|
||||
let (response, _) = server.search_post(json!({ "q": "" })).await;
|
||||
assert_eq!(response["nbHits"], 0);
|
||||
}
|
||||
|
23
meilisearch-http/tests/documents_get.rs
Normal file
@ -0,0 +1,23 @@
|
||||
use serde_json::json;
|
||||
use actix_web::http::StatusCode;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_documents_from_unexisting_index_is_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let (response, status) = server.get_all_documents().await;
|
||||
assert_eq!(status, StatusCode::NOT_FOUND);
|
||||
assert_eq!(response["errorCode"], "index_not_found");
|
||||
assert_eq!(response["errorType"], "invalid_request_error");
|
||||
assert_eq!(response["errorLink"], "https://docs.meilisearch.com/errors#index_not_found");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_empty_documents_list() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server.create_index(json!({ "uid": "test" })).await;
|
||||
let (response, status) = server.get_all_documents().await;
|
||||
assert_eq!(status, StatusCode::OK);
|
||||
assert!(response.as_array().unwrap().is_empty());
|
||||
}
|
395
meilisearch-http/tests/dump.rs
Normal file
@ -0,0 +1,395 @@
|
||||
use assert_json_diff::{assert_json_eq, assert_json_include};
|
||||
use meilisearch_http::helpers::compression;
|
||||
use serde_json::{json, Value};
|
||||
use std::fs::File;
|
||||
use std::path::Path;
|
||||
use std::thread;
|
||||
use std::time::Duration;
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[macro_use] mod common;
|
||||
|
||||
async fn trigger_and_wait_dump(server: &mut common::Server) -> String {
|
||||
let (value, status_code) = server.trigger_dump().await;
|
||||
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let dump_uid = value["uid"].as_str().unwrap().to_string();
|
||||
|
||||
for _ in 0..20u8 {
|
||||
let (value, status_code) = server.get_dump_status(&dump_uid).await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_ne!(value["status"].as_str(), Some("dump_process_failed"));
|
||||
|
||||
if value["status"].as_str() == Some("done") { return dump_uid }
|
||||
thread::sleep(Duration::from_millis(100));
|
||||
}
|
||||
|
||||
unreachable!("dump creation ran out of time")
|
||||
}
|
||||
|
||||
fn current_db_version() -> (String, String, String) {
|
||||
let current_version_major = env!("CARGO_PKG_VERSION_MAJOR").to_string();
|
||||
let current_version_minor = env!("CARGO_PKG_VERSION_MINOR").to_string();
|
||||
let current_version_patch = env!("CARGO_PKG_VERSION_PATCH").to_string();
|
||||
|
||||
(current_version_major, current_version_minor, current_version_patch)
|
||||
}
|
||||
|
||||
fn current_dump_version() -> String {
|
||||
"V1".into()
|
||||
}
|
||||
|
||||
fn read_all_jsonline<R: std::io::Read>(r: R) -> Value {
|
||||
let deserializer = serde_json::Deserializer::from_reader(r);
|
||||
let iterator = deserializer.into_iter::<serde_json::Value>();
|
||||
|
||||
json!(iterator.map(|v| v.unwrap()).collect::<Vec<Value>>())
|
||||
}
|
||||
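`read_all_jsonline` streams a newline-delimited JSON file and collects every record into a single JSON array, which is how the dump tests compare exported `.jsonl` files against expectations. A minimal sketch of using it on the two-line update log shown earlier in this diff; the fixture path and the asserted fields are illustrative assumptions.

```rust
// Illustrative use of the read_all_jsonline helper defined above.
// The fixture path is assumed, not taken from the original tests.
use std::fs::File;

#[test]
fn read_updates_fixture() {
    let file = File::open("tests/assets/dumps/v1/test/updates.jsonl").unwrap();
    let updates = read_all_jsonline(file);

    let updates = updates.as_array().unwrap();
    assert_eq!(updates.len(), 2);
    assert_eq!(updates[0]["status"], "processed");
    assert_eq!(updates[1]["type"]["name"], "DocumentsAddition");
}
```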
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn trigger_dump_should_return_ok() {
|
||||
let server = common::Server::test_server().await;
|
||||
|
||||
let (_, status_code) = server.trigger_dump().await;
|
||||
|
||||
assert_eq!(status_code, 202);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn trigger_dump_twice_should_return_conflict() {
|
||||
let server = common::Server::test_server().await;
|
||||
|
||||
let expected = json!({
|
||||
"message": "Another dump is already in progress",
|
||||
"errorCode": "dump_already_in_progress",
|
||||
"errorType": "invalid_request_error",
|
||||
"errorLink": "https://docs.meilisearch.com/errors#dump_already_in_progress"
|
||||
});
|
||||
|
||||
let (_, status_code) = server.trigger_dump().await;
|
||||
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let (value, status_code) = server.trigger_dump().await;
|
||||
|
||||
|
||||
assert_json_eq!(expected, value, ordered: false);
|
||||
assert_eq!(status_code, 409);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn trigger_dump_concurently_should_return_conflict() {
|
||||
let server = common::Server::test_server().await;
|
||||
|
||||
let expected = json!({
|
||||
"message": "Another dump is already in progress",
|
||||
"errorCode": "dump_already_in_progress",
|
||||
"errorType": "invalid_request_error",
|
||||
"errorLink": "https://docs.meilisearch.com/errors#dump_already_in_progress"
|
||||
});
|
||||
|
||||
let ((_value_1, _status_code_1), (value_2, status_code_2)) = futures::join!(server.trigger_dump(), server.trigger_dump());
|
||||
|
||||
assert_json_eq!(expected, value_2, ordered: false);
|
||||
assert_eq!(status_code_2, 409);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn get_dump_status_early_should_return_in_progress() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
|
||||
|
||||
let (value, status_code) = server.trigger_dump().await;
|
||||
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let dump_uid = value["uid"].as_str().unwrap().to_string();
|
||||
|
||||
let (value, status_code) = server.get_dump_status(&dump_uid).await;
|
||||
|
||||
let expected = json!({
|
||||
"uid": dump_uid,
|
||||
"status": "in_progress"
|
||||
});
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_json_eq!(expected, value, ordered: false);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn get_dump_status_should_return_done() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
|
||||
let (value, status_code) = server.trigger_dump().await;
|
||||
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let dump_uid = value["uid"].as_str().unwrap().to_string();
|
||||
|
||||
let expected = json!({
|
||||
"uid": dump_uid.clone(),
|
||||
"status": "done"
|
||||
});
|
||||
|
||||
thread::sleep(Duration::from_secs(1)); // wait until the dump process ends
|
||||
|
||||
let (value, status_code) = server.get_dump_status(&dump_uid).await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_json_eq!(expected, value, ordered: false);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn get_dump_status_should_return_error_provoking_it() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
|
||||
let (value, status_code) = server.trigger_dump().await;
|
||||
|
||||
// remove the destination directory to provoke a `No such file or directory` error
|
||||
std::fs::remove_dir(server.data().dumps_dir.clone()).unwrap();
|
||||
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let dump_uid = value["uid"].as_str().unwrap().to_string();
|
||||
|
||||
let expected = json!({
|
||||
"uid": dump_uid.clone(),
|
||||
"status": "failed",
|
||||
"message": "Dump process failed: compressing dump; No such file or directory (os error 2)",
|
||||
"errorCode": "dump_process_failed",
|
||||
"errorType": "internal_error",
|
||||
"errorLink": "https://docs.meilisearch.com/errors#dump_process_failed"
|
||||
});
|
||||
|
||||
thread::sleep(Duration::from_secs(1)); // wait until the dump process ends
|
||||
|
||||
let (value, status_code) = server.get_dump_status(&dump_uid).await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_json_eq!(expected, value, ordered: false);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn dump_metadata_should_be_valid() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let body = json!({
|
||||
"uid": "test2",
|
||||
"primaryKey": "test2_id",
|
||||
});
|
||||
|
||||
server.create_index(body).await;
|
||||
|
||||
let uid = trigger_and_wait_dump(&mut server).await;
|
||||
|
||||
let dumps_dir = Path::new(&server.data().dumps_dir);
|
||||
let tmp_dir = TempDir::new().unwrap();
|
||||
let tmp_dir_path = tmp_dir.path();
|
||||
|
||||
compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
|
||||
|
||||
let file = File::open(tmp_dir_path.join("metadata.json")).unwrap();
|
||||
let mut metadata: serde_json::Value = serde_json::from_reader(file).unwrap();
|
||||
|
||||
// fields are randomly ordered
|
||||
metadata.get_mut("indexes").unwrap()
|
||||
.as_array_mut().unwrap()
|
||||
.sort_by(|a, b|
|
||||
a.get("uid").unwrap().as_str().cmp(&b.get("uid").unwrap().as_str())
|
||||
);
|
||||
|
||||
let (major, minor, patch) = current_db_version();
|
||||
|
||||
let expected = json!({
|
||||
"indexes": [{
|
||||
"uid": "test",
|
||||
"primaryKey": "id",
|
||||
}, {
|
||||
"uid": "test2",
|
||||
"primaryKey": "test2_id",
|
||||
}
|
||||
],
|
||||
"dbVersion": format!("{}.{}.{}", major, minor, patch),
|
||||
"dumpVersion": current_dump_version()
|
||||
});
|
||||
|
||||
assert_json_include!(expected: expected, actual: metadata);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn dump_gzip_should_have_been_created() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
|
||||
let dump_uid = trigger_and_wait_dump(&mut server).await;
|
||||
let dumps_dir = Path::new(&server.data().dumps_dir);
|
||||
|
||||
let compressed_path = dumps_dir.join(format!("{}.dump", dump_uid));
|
||||
assert!(File::open(compressed_path).is_ok());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn dump_index_settings_should_be_valid() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let expected = json!({
|
||||
"rankingRules": [
|
||||
"typo",
|
||||
"words",
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"exactness"
|
||||
],
|
||||
"distinctAttribute": "email",
|
||||
"searchableAttributes": [
|
||||
"balance",
|
||||
"picture",
|
||||
"age",
|
||||
"color",
|
||||
"name",
|
||||
"gender",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"about",
|
||||
"registered",
|
||||
"latitude",
|
||||
"longitude",
|
||||
"tags"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"id",
|
||||
"isActive",
|
||||
"balance",
|
||||
"picture",
|
||||
"age",
|
||||
"color",
|
||||
"name",
|
||||
"gender",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"about",
|
||||
"registered",
|
||||
"latitude",
|
||||
"longitude",
|
||||
"tags"
|
||||
],
|
||||
"stopWords": [
|
||||
"in",
|
||||
"ad"
|
||||
],
|
||||
"synonyms": {
|
||||
"wolverine": ["xmen", "logan"],
|
||||
"logan": ["wolverine", "xmen"]
|
||||
},
|
||||
"attributesForFaceting": [
|
||||
"gender",
|
||||
"color",
|
||||
"tags"
|
||||
]
|
||||
});
|
||||
|
||||
server.update_all_settings(expected.clone()).await;
|
||||
|
||||
let uid = trigger_and_wait_dump(&mut server).await;
|
||||
|
||||
let dumps_dir = Path::new(&server.data().dumps_dir);
|
||||
let tmp_dir = TempDir::new().unwrap();
|
||||
let tmp_dir_path = tmp_dir.path();
|
||||
|
||||
compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
|
||||
|
||||
let file = File::open(tmp_dir_path.join("test").join("settings.json")).unwrap();
|
||||
let settings: serde_json::Value = serde_json::from_reader(file).unwrap();
|
||||
|
||||
assert_json_eq!(expected, settings, ordered: false);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn dump_index_documents_should_be_valid() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let dataset = include_bytes!("assets/dumps/v1/test/documents.jsonl");
|
||||
let mut slice: &[u8] = dataset;
|
||||
|
||||
let expected: Value = read_all_jsonline(&mut slice);
|
||||
|
||||
let uid = trigger_and_wait_dump(&mut server).await;
|
||||
|
||||
let dumps_dir = Path::new(&server.data().dumps_dir);
|
||||
let tmp_dir = TempDir::new().unwrap();
|
||||
let tmp_dir_path = tmp_dir.path();
|
||||
|
||||
compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
|
||||
|
||||
let file = File::open(tmp_dir_path.join("test").join("documents.jsonl")).unwrap();
|
||||
let documents = read_all_jsonline(file);
|
||||
|
||||
assert_json_eq!(expected, documents, ordered: false);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn dump_index_updates_should_be_valid() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let dataset = include_bytes!("assets/dumps/v1/test/updates.jsonl");
|
||||
let mut slice: &[u8] = dataset;
|
||||
|
||||
let expected: Value = read_all_jsonline(&mut slice);
|
||||
|
||||
let uid = trigger_and_wait_dump(&mut server).await;
|
||||
|
||||
let dumps_dir = Path::new(&server.data().dumps_dir);
|
||||
let tmp_dir = TempDir::new().unwrap();
|
||||
let tmp_dir_path = tmp_dir.path();
|
||||
|
||||
compression::from_tar_gz(&dumps_dir.join(&format!("{}.dump", uid)), tmp_dir_path).unwrap();
|
||||
|
||||
let file = File::open(tmp_dir_path.join("test").join("updates.jsonl")).unwrap();
|
||||
let mut updates = read_all_jsonline(file);
|
||||
|
||||
|
||||
// hotfix until #943 is fixed (https://github.com/meilisearch/MeiliSearch/issues/943)
|
||||
updates.as_array_mut().unwrap()
|
||||
.get_mut(0).unwrap()
|
||||
.get_mut("type").unwrap()
|
||||
.get_mut("settings").unwrap()
|
||||
.get_mut("displayed_attributes").unwrap()
|
||||
.get_mut("Update").unwrap()
|
||||
.as_array_mut().unwrap().sort_by(|a, b| a.as_str().cmp(&b.as_str()));
|
||||
|
||||
eprintln!("{}\n", updates.to_string());
|
||||
eprintln!("{}", expected.to_string());
|
||||
assert_json_include!(expected: expected, actual: updates);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
#[ignore]
|
||||
async fn get_unexisting_dump_status_should_return_not_found() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let (_, status_code) = server.get_dump_status("4242").await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
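The dump tests above rely on a trigger_and_wait_dump helper whose body is truncated in this view (only its closing unreachable! is visible further up). A minimal sketch of such a helper, assuming only the trigger_dump and get_dump_status methods already used in these tests, could look like this; the retry count, sleep interval and return value are assumptions:

async fn trigger_and_wait_dump(server: &mut common::Server) -> String {
    // Trigger the dump and make sure it was accepted.
    let (value, status_code) = server.trigger_dump().await;
    assert_eq!(status_code, 202);

    let dump_uid = value["uid"].as_str().unwrap().to_string();

    // Poll the dump status until it leaves "in_progress".
    for _ in 0..20 {
        let (value, status_code) = server.get_dump_status(&dump_uid).await;
        assert_eq!(status_code, 200);
        if value["status"].as_str() != Some("in_progress") {
            return dump_uid;
        }
        std::thread::sleep(std::time::Duration::from_millis(100));
    }

    unreachable!("dump creation ran out of time")
}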
@ -43,11 +43,15 @@ async fn index_already_exists_error() {
|
||||
let (response, status_code) = server.create_index(body.clone()).await;
|
||||
println!("{}", response);
|
||||
assert_eq!(status_code, StatusCode::CREATED);
|
||||
|
||||
let (response, status_code) = server.create_index(body.clone()).await;
|
||||
println!("{}", response);
|
||||
|
||||
assert_error!(
|
||||
"index_already_exists",
|
||||
"invalid_request_error",
|
||||
StatusCode::BAD_REQUEST,
|
||||
server.create_index(body).await);
|
||||
(response, status_code));
|
||||
}
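The assert_error! macro used above is defined in the shared tests/common module, which is not part of this diff; a minimal sketch matching the (response, status_code) call sites shown here might be:

macro_rules! assert_error {
    // Hypothetical reconstruction: the real macro in tests/common may differ.
    ($error_code:expr, $error_type:expr, $status:expr, $response:expr) => {{
        let (response, status_code) = $response;
        assert_eq!(status_code, $status);
        assert_eq!(response["errorCode"], $error_code);
        assert_eq!(response["errorType"], $error_type);
    }};
}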
|
||||
|
||||
#[actix_rt::test]
|
||||
|
@ -1,6 +1,3 @@
|
||||
use serde_json::json;
|
||||
use std::convert::Into;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -10,29 +7,5 @@ async fn test_healthyness() {
|
||||
// Check that the server is healthy
|
||||
|
||||
let (_response, status_code) = server.get_health().await;
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
// Set the server unhealthy
|
||||
let body = json!({
|
||||
"health": false,
|
||||
});
|
||||
let (_response, status_code) = server.update_health(body).await;
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
// Check that the server is unhealthy
|
||||
|
||||
let (_response, status_code) = server.get_health().await;
|
||||
assert_eq!(status_code, 503);
|
||||
|
||||
// Set the server healthy
|
||||
let body = json!({
|
||||
"health": true,
|
||||
});
|
||||
let (_response, status_code) = server.update_health(body).await;
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
// Check that the server is healthy again
|
||||
|
||||
let (_response, status_code) = server.get_health().await;
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(status_code, 204);
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
use actix_web::http::StatusCode;
|
||||
use assert_json_diff::assert_json_eq;
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
mod common;
|
||||
|
||||
@ -72,7 +72,10 @@ async fn create_index_with_uid() {
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
|
||||
assert_eq!(status_code, 400);
|
||||
assert_eq!(response["errorCode"].as_str().unwrap(), "index_already_exists");
|
||||
assert_eq!(
|
||||
response["errorCode"].as_str().unwrap(),
|
||||
"index_already_exists"
|
||||
);
|
||||
|
||||
// 2 - Check the list of indexes
|
||||
|
||||
@ -665,10 +668,10 @@ async fn check_add_documents_without_primary_key() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn check_first_update_should_bring_up_processed_status_after_first_docs_addition() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
"uid": "movies",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
@ -676,7 +679,7 @@ async fn check_first_update_should_bring_up_processed_status_after_first_docs_ad
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let dataset = include_bytes!("assets/test_set.json");
|
||||
let dataset = include_bytes!("./assets/test_set.json");
|
||||
|
||||
let body: Value = serde_json::from_slice(dataset).unwrap();
|
||||
|
||||
@ -690,3 +693,117 @@ async fn check_first_update_should_bring_up_processed_status_after_first_docs_ad
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response[0]["status"], "processed");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_empty_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let (response, _status) = server.list_indexes().await;
|
||||
assert!(response.as_array().unwrap().is_empty());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_and_list_multiple_indices() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
for i in 0..10 {
|
||||
server
|
||||
.create_index(json!({ "uid": format!("test{}", i) }))
|
||||
.await;
|
||||
}
|
||||
let (response, _status) = server.list_indexes().await;
|
||||
assert_eq!(response.as_array().unwrap().len(), 10);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_unexisting_index_is_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let (response, status) = server.get_index().await;
|
||||
assert_eq!(status, StatusCode::NOT_FOUND);
|
||||
assert_eq!(response["errorCode"], "index_not_found");
|
||||
assert_eq!(response["errorType"], "invalid_request_error");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_twice_is_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server.create_index(json!({ "uid": "test" })).await;
|
||||
let (response, status) = server.create_index(json!({ "uid": "test" })).await;
|
||||
assert_eq!(status, StatusCode::BAD_REQUEST);
|
||||
assert_eq!(response["errorCode"], "index_already_exists");
|
||||
assert_eq!(response["errorType"], "invalid_request_error");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn badly_formatted_index_name_is_error() {
|
||||
let mut server = common::Server::with_uid("$__test");
|
||||
let (response, status) = server.create_index(json!({ "uid": "$__test" })).await;
|
||||
assert_eq!(status, StatusCode::BAD_REQUEST);
|
||||
assert_eq!(response["errorCode"], "invalid_index_uid");
|
||||
assert_eq!(response["errorType"], "invalid_request_error");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn correct_response_no_primary_key_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let (response, _status) = server.create_index(json!({ "uid": "test" })).await;
|
||||
assert_eq!(response["primaryKey"], Value::Null);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn correct_response_with_primary_key_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let (response, _status) = server
|
||||
.create_index(json!({ "uid": "test", "primaryKey": "test" }))
|
||||
.await;
|
||||
assert_eq!(response["primaryKey"], "test");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn udpate_unexisting_index_is_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
let (response, status) = server.update_index(json!({ "primaryKey": "foobar" })).await;
|
||||
assert_eq!(status, StatusCode::NOT_FOUND);
|
||||
assert_eq!(response["errorCode"], "index_not_found");
|
||||
assert_eq!(response["errorType"], "invalid_request_error");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn update_existing_primary_key_is_error() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
server
|
||||
.create_index(json!({ "uid": "test", "primaryKey": "key" }))
|
||||
.await;
|
||||
let (response, status) = server.update_index(json!({ "primaryKey": "test2" })).await;
|
||||
assert_eq!(status, StatusCode::BAD_REQUEST);
|
||||
assert_eq!(response["errorCode"], "primary_key_already_present");
|
||||
assert_eq!(response["errorType"], "invalid_request_error");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_facets_distribution_attribute() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let (response, _status_code) = server.get_index_stats().await;
|
||||
|
||||
let expected = json!({
|
||||
"isIndexing": false,
|
||||
"numberOfDocuments":77,
|
||||
"fieldsDistribution":{
|
||||
"age":77,
|
||||
"gender":77,
|
||||
"phone":77,
|
||||
"name":77,
|
||||
"registered":77,
|
||||
"latitude":77,
|
||||
"email":77,
|
||||
"tags":77,
|
||||
"longitude":77,
|
||||
"color":77,
|
||||
"address":77,
|
||||
"balance":77,
|
||||
"about":77,
|
||||
"picture":77,
|
||||
},
|
||||
});
|
||||
|
||||
assert_json_eq!(expected, response, ordered: true);
|
||||
}
|
||||
|
200
meilisearch-http/tests/index_update.rs
Normal file
@ -0,0 +1,200 @@
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use assert_json_diff::assert_json_include;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn check_first_update_should_bring_up_processed_status_after_first_docs_addition() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let dataset = include_bytes!("assets/test_set.json");
|
||||
|
||||
let body: Value = serde_json::from_slice(dataset).unwrap();
|
||||
|
||||
// 2. Index the documents from test_set.json, present inside the assets directory
|
||||
server.add_or_replace_multiple_documents(body).await;
|
||||
|
||||
// 3. Fetch the status of the indexing done above.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 4. Verify the fetch is successful and indexing status is 'processed'
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response[0]["status"], "processed");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_error_when_get_update_status_of_unexisting_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
// 1. Fetch the update status of a nonexistent index.
|
||||
let (_, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 2. Verify the fetch returned 404
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_empty_when_get_update_status_of_empty_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
// 2. Fetch the update status of the empty index.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 3. Verify the fetch is successful and no documents are returned
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response, json!([]));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_update_status_of_pushed_documents() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
|
||||
let bodies = vec![
|
||||
json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]),
|
||||
json!([{
|
||||
"title": "Test1",
|
||||
"comment": "comment test1"
|
||||
}]),
|
||||
json!([{
|
||||
"title": "Test2",
|
||||
"comment": "comment test2"
|
||||
}]),
|
||||
];
|
||||
|
||||
let mut update_ids = Vec::new();
|
||||
|
||||
let url = "/indexes/test/documents?primaryKey=title";
|
||||
for body in bodies {
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 202);
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
update_ids.push(update_id);
|
||||
}
|
||||
|
||||
// 2. Fetch the update statuses of the index.
|
||||
let (response, status_code) = server.get_all_updates_status().await;
|
||||
|
||||
// 3. Verify the fetch is successful, and updates are returned
|
||||
|
||||
let expected = json!([{
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_ids[0]
|
||||
},{
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_ids[1]
|
||||
},{
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_ids[2]
|
||||
},]);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_json_include!(actual: json!(response), expected: expected);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_error_if_index_does_not_exist() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let (response, status_code) = server.get_update_status(42).await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
assert_eq!(response["errorCode"], "index_not_found");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn return_error_if_update_does_not_exist() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let (response, status_code) = server.get_update_status(42).await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
assert_eq!(response["errorCode"], "not_found");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn should_return_existing_update() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
});
|
||||
|
||||
// 1. Create Index
|
||||
let (response, status_code) = server.create_index(body).await;
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(response["primaryKey"], json!(null));
|
||||
|
||||
let body = json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]);
|
||||
|
||||
let url = "/indexes/test/documents?primaryKey=title";
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 202);
|
||||
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
|
||||
let expected = json!({
|
||||
"type": {
|
||||
"name": "DocumentsAddition",
|
||||
"number": 1,
|
||||
},
|
||||
"updateId": update_id
|
||||
});
|
||||
|
||||
let (response, status_code) = server.get_update_status(update_id).await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_json_include!(actual: json!(response), expected: expected);
|
||||
}
|
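The lazy-creation tests below call server.wait_update_id(update_id) after pushing documents. A minimal sketch of such a polling helper, assuming only the get_update_status method and the "processed" status checked elsewhere in this file ("failed" is an assumed terminal state), could be:

// Sketch only: the real helper lives in tests/common. A blocking sleep keeps the
// example dependency-free; the real helper presumably uses the runtime's async timer.
async fn wait_update_id(server: &mut common::Server, update_id: u64) {
    for _ in 0..20 {
        let (response, status_code) = server.get_update_status(update_id).await;
        assert_eq!(status_code, 200);
        if response["status"] == "processed" || response["status"] == "failed" {
            return;
        }
        std::thread::sleep(std::time::Duration::from_millis(100));
    }
    panic!("update {} was never processed", update_id);
}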
446
meilisearch-http/tests/lazy_index_creation.rs
Normal file
@ -0,0 +1,446 @@
|
||||
use serde_json::json;
|
||||
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_pushing_documents() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Add documents
|
||||
|
||||
let body = json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]);
|
||||
|
||||
let url = "/indexes/movies/documents?primaryKey=title";
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 202);
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
server.wait_update_id(update_id).await;
|
||||
|
||||
// 3 - Check update success
|
||||
|
||||
let (response, status_code) = server.get_update_status(update_id).await;
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response["status"], "processed");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_pushing_documents_and_discover_pk() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Add documents
|
||||
|
||||
let body = json!([{
|
||||
"id": 1,
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]);
|
||||
|
||||
let url = "/indexes/movies/documents";
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 202);
|
||||
let update_id = response["updateId"].as_u64().unwrap();
|
||||
server.wait_update_id(update_id).await;
|
||||
|
||||
// 3 - Check update success
|
||||
|
||||
let (response, status_code) = server.get_update_status(update_id).await;
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response["status"], "processed");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_pushing_documents_with_wrong_name() {
|
||||
let server = common::Server::with_uid("wrong&name");
|
||||
|
||||
let body = json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]);
|
||||
|
||||
let url = "/indexes/wrong&name/documents?primaryKey=title";
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 400);
|
||||
assert_eq!(response["errorCode"], "invalid_index_uid");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_add_documents_failed() {
|
||||
let mut server = common::Server::with_uid("wrong&name");
|
||||
|
||||
let body = json!([{
|
||||
"title": "Test",
|
||||
"comment": "comment test"
|
||||
}]);
|
||||
|
||||
let url = "/indexes/wrong&name/documents";
|
||||
let (response, status_code) = server.post_request(&url, body).await;
|
||||
assert_eq!(status_code, 400);
|
||||
assert_eq!(response["errorCode"], "invalid_index_uid");
|
||||
|
||||
let (_, status_code) = server.get_index().await;
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_settings() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!({
|
||||
"rankingRules": [
|
||||
"typo",
|
||||
"words",
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"exactness",
|
||||
"desc(registered)",
|
||||
"desc(age)",
|
||||
],
|
||||
"distinctAttribute": "id",
|
||||
"searchableAttributes": [
|
||||
"id",
|
||||
"name",
|
||||
"color",
|
||||
"gender",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"registered",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"name",
|
||||
"gender",
|
||||
"email",
|
||||
"registered",
|
||||
"age",
|
||||
],
|
||||
"stopWords": [
|
||||
"ad",
|
||||
"in",
|
||||
"ut",
|
||||
],
|
||||
"synonyms": {
|
||||
"road": ["street", "avenue"],
|
||||
"street": ["avenue"],
|
||||
},
|
||||
"attributesForFaceting": ["name"],
|
||||
});
|
||||
|
||||
server.update_all_settings(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_settings_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!({
|
||||
"rankingRules": [
|
||||
"other",
|
||||
"words",
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"exactness",
|
||||
"desc(registered)",
|
||||
"desc(age)",
|
||||
],
|
||||
"distinctAttribute": "id",
|
||||
"searchableAttributes": [
|
||||
"id",
|
||||
"name",
|
||||
"color",
|
||||
"gender",
|
||||
"email",
|
||||
"phone",
|
||||
"address",
|
||||
"registered",
|
||||
"about"
|
||||
],
|
||||
"displayedAttributes": [
|
||||
"name",
|
||||
"gender",
|
||||
"email",
|
||||
"registered",
|
||||
"age",
|
||||
],
|
||||
"stopWords": [
|
||||
"ad",
|
||||
"in",
|
||||
"ut",
|
||||
],
|
||||
"synonyms": {
|
||||
"road": ["street", "avenue"],
|
||||
"street": ["avenue"],
|
||||
},
|
||||
"anotherSettings": ["name"],
|
||||
});
|
||||
|
||||
let (_, status_code) = server.update_all_settings_sync(body.clone()).await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_ranking_rules() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!([
|
||||
"typo",
|
||||
"words",
|
||||
"proximity",
|
||||
"attribute",
|
||||
"wordsPosition",
|
||||
"exactness",
|
||||
"desc(registered)",
|
||||
"desc(age)",
|
||||
]);
|
||||
|
||||
server.update_ranking_rules(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_ranking_rules_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!({
|
||||
"rankingRules": 123,
|
||||
});
|
||||
|
||||
let (_, status_code) = server.update_ranking_rules_sync(body.clone()).await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_distinct_attribute() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!("type");
|
||||
|
||||
server.update_distinct_attribute(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_distinct_attribute_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(123);
|
||||
|
||||
let (resp, status_code) = server.update_distinct_attribute_sync(body.clone()).await;
|
||||
eprintln!("resp: {:?}", resp);
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (resp, status_code) = server.get_all_settings().await;
|
||||
eprintln!("resp: {:?}", resp);
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_searchable_attributes() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(["title", "description"]);
|
||||
|
||||
server.update_searchable_attributes(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_searchable_attributes_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(123);
|
||||
|
||||
let (_, status_code) = server.update_searchable_attributes_sync(body.clone()).await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_displayed_attributes() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(["title", "description"]);
|
||||
|
||||
server.update_displayed_attributes(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_displayed_attributes_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(123);
|
||||
|
||||
let (_, status_code) = server.update_displayed_attributes_sync(body.clone()).await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_attributes_for_faceting() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(["title", "description"]);
|
||||
|
||||
server.update_attributes_for_faceting(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_attributes_for_faceting_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(123);
|
||||
|
||||
let (_, status_code) = server
|
||||
.update_attributes_for_faceting_sync(body.clone())
|
||||
.await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_synonyms() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!({
|
||||
"road": ["street", "avenue"],
|
||||
"street": ["avenue"],
|
||||
});
|
||||
|
||||
server.update_synonyms(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_synonyms_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(123);
|
||||
|
||||
let (_, status_code) = server.update_synonyms_sync(body.clone()).await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_stop_words() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(["le", "la", "les"]);
|
||||
|
||||
server.update_stop_words(body.clone()).await;
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn create_index_lazy_by_sending_stop_words_with_error() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
// 2 - Send the settings
|
||||
|
||||
let body = json!(123);
|
||||
|
||||
let (_, status_code) = server.update_stop_words_sync(body.clone()).await;
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
// 3 - Get all settings and compare to the previous one
|
||||
|
||||
let (_, status_code) = server.get_all_settings().await;
|
||||
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
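These lazy-creation tests distinguish the `_sync` settings helpers (which return the raw response and are used for the error cases) from the plain variants (which also wait for the resulting update). Assuming the 202 + updateId convention used for document additions above, the waiting variant can be sketched as a thin wrapper; the real helpers are methods on Server and may differ:

async fn update_all_settings(server: &mut common::Server, body: serde_json::Value) {
    // Hypothetical wrapper: push the settings, then wait for the update to be processed.
    let (response, status_code) = server.update_all_settings_sync(body).await;
    assert_eq!(status_code, 202);
    let update_id = response["updateId"].as_u64().unwrap();
    server.wait_update_id(update_id).await;
}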
@ -2,10 +2,11 @@ use std::convert::Into;
|
||||
|
||||
use serde_json::json;
|
||||
use serde_json::Value;
|
||||
use std::sync::Mutex;
|
||||
use std::cell::RefCell;
|
||||
use std::sync::Mutex;
|
||||
|
||||
#[macro_use] mod common;
|
||||
#[macro_use]
|
||||
mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_limit() {
|
||||
@ -36,9 +37,8 @@ async fn placeholder_search_with_offset() {
|
||||
assert_eq!(status_code, 200);
|
||||
// take results at offset 3 as reference
|
||||
let lock = expected.lock().unwrap();
|
||||
lock.replace(response["hits"].as_array().unwrap()[3..6].iter().cloned().collect());
|
||||
lock.replace(response["hits"].as_array().unwrap()[3..6].to_vec());
|
||||
});
|
||||
|
||||
let expected = expected.into_inner().unwrap().into_inner();
|
||||
|
||||
let query = json!({
|
||||
@ -64,11 +64,7 @@ async fn placeholder_search_with_attribute_to_highlight_wildcard() {
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(status_code, 200);
|
||||
let result = response["hits"]
|
||||
.as_array()
|
||||
.unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap();
|
||||
let result = response["hits"].as_array().unwrap()[0].as_object().unwrap();
|
||||
for value in result.values() {
|
||||
assert!(value.to_string().find("<em>").is_none());
|
||||
}
|
||||
@ -135,11 +131,7 @@ async fn placeholder_search_with_attributes_to_retrieve() {
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hit = response["hits"]
|
||||
.as_array()
|
||||
.unwrap()[0]
|
||||
.as_object()
|
||||
.unwrap();
|
||||
let hit = response["hits"].as_array().unwrap()[0].as_object().unwrap();
|
||||
assert_eq!(hit.values().count(), 2);
|
||||
let _ = hit["gender"];
|
||||
let _ = hit["about"];
|
||||
@ -156,7 +148,7 @@ async fn placeholder_search_with_filter() {
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
assert!(hits.iter().all(|v| v["color"].as_str().unwrap() == "green"));
|
||||
assert!(hits.iter().all(|v| v["color"].as_str().unwrap() == "Green"));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
@ -166,7 +158,9 @@ async fn placeholder_search_with_filter() {
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
let value = Value::String(String::from("bug"));
|
||||
assert!(hits.iter().all(|v| v["tags"].as_array().unwrap().contains(&value)));
|
||||
assert!(hits
|
||||
.iter()
|
||||
.all(|v| v["tags"].as_array().unwrap().contains(&value)));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
@ -176,10 +170,9 @@ async fn placeholder_search_with_filter() {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
let bug = Value::String(String::from("bug"));
|
||||
let wontfix = Value::String(String::from("wontfix"));
|
||||
assert!(hits.iter().all(|v|
|
||||
v["color"].as_str().unwrap() == "green" &&
|
||||
v["tags"].as_array().unwrap().contains(&bug) ||
|
||||
v["tags"].as_array().unwrap().contains(&wontfix)));
|
||||
assert!(hits.iter().all(|v| v["color"].as_str().unwrap() == "Green"
|
||||
&& v["tags"].as_array().unwrap().contains(&bug)
|
||||
|| v["tags"].as_array().unwrap().contains(&wontfix)));
|
||||
});
|
||||
}
|
||||
|
||||
@ -206,7 +199,7 @@ async fn placeholder_test_faceted_search_valid() {
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value.get("color").unwrap() == "green"));
|
||||
.all(|value| value.get("color").unwrap() == "Green"));
|
||||
});
|
||||
|
||||
let query = json!({
|
||||
@ -257,7 +250,12 @@ async fn placeholder_test_faceted_search_valid() {
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
|
||||
.all(|value| value
|
||||
.get("tags")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.contains(&Value::String("bug".to_owned()))));
|
||||
});
|
||||
|
||||
// test and: ["color:blue", "tags:bug"]
|
||||
@ -272,10 +270,13 @@ async fn placeholder_test_faceted_search_valid() {
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value| value
|
||||
.get("color")
|
||||
.unwrap() == "blue"
|
||||
&& value.get("tags").unwrap().as_array().unwrap().contains(&Value::String("bug".to_owned()))));
|
||||
.all(|value| value.get("color").unwrap() == "blue"
|
||||
&& value
|
||||
.get("tags")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.contains(&Value::String("bug".to_owned()))));
|
||||
});
|
||||
|
||||
// test or: [["color:blue", "color:green"]]
|
||||
@ -290,13 +291,8 @@ async fn placeholder_test_faceted_search_valid() {
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value|
|
||||
value
|
||||
.get("color")
|
||||
.unwrap() == "blue"
|
||||
|| value
|
||||
.get("color")
|
||||
.unwrap() == "green"));
|
||||
.all(|value| value.get("color").unwrap() == "blue"
|
||||
|| value.get("color").unwrap() == "Green"));
|
||||
});
|
||||
// test and-or: ["tags:bug", ["color:blue", "color:green"]]
|
||||
let query = json!({
|
||||
@ -310,20 +306,14 @@ async fn placeholder_test_faceted_search_valid() {
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.all(|value|
|
||||
value
|
||||
.all(|value| value
|
||||
.get("tags")
|
||||
.unwrap()
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.contains(&Value::String("bug".to_owned()))
|
||||
&& (value
|
||||
.get("color")
|
||||
.unwrap() == "blue"
|
||||
|| value
|
||||
.get("color")
|
||||
.unwrap() == "green")));
|
||||
|
||||
&& (value.get("color").unwrap() == "blue"
|
||||
|| value.get("color").unwrap() == "Green")));
|
||||
});
|
||||
}
|
||||
|
||||
@ -335,7 +325,10 @@ async fn placeholder_test_faceted_search_invalid() {
|
||||
let query = json!({
|
||||
"facetFilters": ["color:blue"]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
|
||||
let body = json!({
|
||||
"attributesForFaceting": ["color", "tags"]
|
||||
@ -346,34 +339,52 @@ async fn placeholder_test_faceted_search_invalid() {
|
||||
let query = json!({
|
||||
"facetFilters": []
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
// [[]]
|
||||
let query = json!({
|
||||
"facetFilters": [[]]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
// ["color:green", []]
|
||||
let query = json!({
|
||||
"facetFilters": ["color:green", []]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
|
||||
// too much depth
|
||||
// [[[]]]
|
||||
let query = json!({
|
||||
"facetFilters": [[[]]]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
// [["color:green", ["color:blue"]]]
|
||||
let query = json!({
|
||||
"facetFilters": [["color:green", ["color:blue"]]]
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
// "color:green"
|
||||
let query = json!({
|
||||
"facetFilters": "color:green"
|
||||
});
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(status_code, 202));
|
||||
test_post_get_search!(server, query, |_response, status_code| assert_ne!(
|
||||
status_code,
|
||||
202
|
||||
));
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -381,8 +392,7 @@ async fn placeholder_test_facet_count() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
// test without facet distribution
|
||||
let query = json!({
|
||||
});
|
||||
let query = json!({});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert!(response.get("exhaustiveFacetsCount").is_none());
|
||||
assert!(response.get("facetsDistribution").is_none());
|
||||
@ -404,31 +414,80 @@ async fn placeholder_test_facet_count() {
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
println!("{}", response);
|
||||
assert!(response.get("exhaustiveFacetsCount").is_some());
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 1);
|
||||
assert_eq!(
|
||||
response
|
||||
.get("facetsDistribution")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.values()
|
||||
.count(),
|
||||
1
|
||||
);
|
||||
});
|
||||
// searching on color and tags
|
||||
let query = json!({
|
||||
"facetsDistribution": ["color", "tags"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let facets = response.get("facetsDistribution").unwrap().as_object().unwrap();
|
||||
let facets = response
|
||||
.get("facetsDistribution")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap();
|
||||
assert_eq!(facets.values().count(), 2);
|
||||
assert_ne!(!facets.get("color").unwrap().as_object().unwrap().values().count(), 0);
|
||||
assert_ne!(!facets.get("tags").unwrap().as_object().unwrap().values().count(), 0);
|
||||
assert_ne!(
|
||||
!facets
|
||||
.get("color")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.values()
|
||||
.count(),
|
||||
0
|
||||
);
|
||||
assert_ne!(
|
||||
!facets
|
||||
.get("tags")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.values()
|
||||
.count(),
|
||||
0
|
||||
);
|
||||
});
|
||||
// wildcard
|
||||
let query = json!({
|
||||
"facetsDistribution": ["*"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 2);
|
||||
assert_eq!(
|
||||
response
|
||||
.get("facetsDistribution")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.values()
|
||||
.count(),
|
||||
2
|
||||
);
|
||||
});
|
||||
// wildcard with other attributes:
|
||||
let query = json!({
|
||||
"facetsDistribution": ["color", "*"]
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 2);
|
||||
assert_eq!(
|
||||
response
|
||||
.get("facetsDistribution")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.values()
|
||||
.count(),
|
||||
2
|
||||
);
|
||||
});
|
||||
|
||||
// empty facet list
|
||||
@ -436,7 +495,16 @@ async fn placeholder_test_facet_count() {
|
||||
"facetsDistribution": []
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 0);
|
||||
assert_eq!(
|
||||
response
|
||||
.get("facetsDistribution")
|
||||
.unwrap()
|
||||
.as_object()
|
||||
.unwrap()
|
||||
.values()
|
||||
.count(),
|
||||
0
|
||||
);
|
||||
});
|
||||
|
||||
// attr not set as facet passed:
|
||||
@ -446,7 +514,6 @@ async fn placeholder_test_facet_count() {
|
||||
test_post_get_search!(server, query, |_response, status_code| {
|
||||
assert_eq!(status_code, 400);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@ -478,7 +545,9 @@ async fn placeholder_test_sort() {
|
||||
let query = json!({});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
hits.iter().map(|v| v["age"].as_u64().unwrap()).fold(0, |prev, cur| {
|
||||
hits.iter()
|
||||
.map(|v| v["age"].as_u64().unwrap())
|
||||
.fold(0, |prev, cur| {
|
||||
assert!(cur >= prev);
|
||||
cur
|
||||
});
|
||||
@ -489,9 +558,72 @@ async fn placeholder_test_sort() {
|
||||
});
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
hits.iter().map(|v| v["age"].as_u64().unwrap()).fold(0, |prev, cur| {
|
||||
hits.iter()
|
||||
.map(|v| v["age"].as_u64().unwrap())
|
||||
.fold(0, |prev, cur| {
|
||||
assert!(cur >= prev);
|
||||
cur
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn placeholder_search_with_empty_query() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json! ({
|
||||
"q": "",
|
||||
"limit": 3
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
eprintln!("{}", response);
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_filter_nb_hits_search_placeholder() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let body = json!({
|
||||
"uid": "test",
|
||||
"primaryKey": "id",
|
||||
});
|
||||
|
||||
server.create_index(body).await;
|
||||
let documents = json!([
|
||||
{
|
||||
"id": 1,
|
||||
"content": "a",
|
||||
"color": "green",
|
||||
"size": 1,
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"content": "a",
|
||||
"color": "green",
|
||||
"size": 2,
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"content": "a",
|
||||
"color": "blue",
|
||||
"size": 3,
|
||||
},
|
||||
]);
|
||||
|
||||
server.add_or_update_multiple_documents(documents).await;
|
||||
let (response, _) = server.search_post(json!({})).await;
|
||||
assert_eq!(response["nbHits"], 3);
|
||||
|
||||
server.update_distinct_attribute(json!("color")).await;
|
||||
|
||||
let (response, _) = server.search_post(json!({})).await;
|
||||
assert_eq!(response["nbHits"], 2);
|
||||
|
||||
let (response, _) = server.search_post(json!({"filters": "size < 3"})).await;
|
||||
println!("result: {}", response);
|
||||
assert_eq!(response["nbHits"], 1);
|
||||
}
|
||||
|
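Both the placeholder-search tests above and the search tests below drive every query through the test_post_get_search! macro from tests/common, so each assertion block runs against the POST search route and its GET equivalent. A rough sketch, assuming a search_get counterpart to the search_post helper used above, might be:

macro_rules! test_post_get_search {
    // Hypothetical reconstruction: run the same assertions against both search routes.
    ($server:expr, $query:expr, |$response:ident, $status_code:ident| $assertions:block) => {{
        let ($response, $status_code) = $server.search_post($query.clone()).await;
        $assertions
        let ($response, $status_code) = $server.search_get($query.clone()).await;
        $assertions
    }};
    ($server:expr, $query:expr, |$response:ident, $status_code:ident| $assertion:expr) => {
        test_post_get_search!($server, $query, |$response, $status_code| { $assertion; })
    };
}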
@ -7,12 +7,11 @@ use serde_json::Value;
|
||||
#[macro_use] mod common;
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_limit() {
|
||||
async fn search() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json! ({
|
||||
"q": "exercitation",
|
||||
"limit": 3
|
||||
"q": "exercitation"
|
||||
});
|
||||
|
||||
let expected = json!([
|
||||
@ -21,7 +20,7 @@ async fn search_with_limit() {
|
||||
"balance": "$1,706.13",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 27,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
@ -42,7 +41,141 @@ async fn search_with_limit() {
|
||||
"balance": "$1,921.58",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 31,
|
||||
"color": "green",
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
"phone": "+1 (912) 430-3243",
|
||||
"address": "883 Dennett Place, Knowlton, New Mexico, 9219",
|
||||
"about": "Exercitation minim esse proident cillum velit et deserunt incididunt adipisicing minim. Cillum Lorem consectetur laborum id consequat exercitation velit. Magna dolor excepteur sunt deserunt dolor ullamco non sint proident ipsum. Reprehenderit voluptate sit veniam consectetur ea sunt duis labore deserunt ipsum aute. Eiusmod aliqua anim voluptate id duis tempor aliqua commodo sunt. Do officia ea consectetur nostrud eiusmod laborum.\r\n",
|
||||
"registered": "2019-12-07T07:33:15 -01:00",
|
||||
"latitude": -60.812605,
|
||||
"longitude": -27.129016,
|
||||
"tags": [
|
||||
"bug",
|
||||
"new issue"
|
||||
],
|
||||
"isActive": true
|
||||
},
|
||||
{
|
||||
"id": 49,
|
||||
"balance": "$1,476.39",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 28,
|
||||
"color": "brown",
|
||||
"name": "Maureen Dale",
|
||||
"gender": "female",
|
||||
"email": "maureendale@chorizon.com",
|
||||
"phone": "+1 (984) 538-3684",
|
||||
"address": "817 Newton Street, Bannock, Wyoming, 1468",
|
||||
"about": "Tempor mollit exercitation excepteur cupidatat reprehenderit ad ex. Nulla laborum proident incididunt quis. Esse laborum deserunt qui anim. Sunt incididunt pariatur cillum anim proident eu ullamco dolor excepteur. Ullamco amet culpa nostrud adipisicing duis aliqua consequat duis non eu id mollit velit. Deserunt ullamco amet in occaecat.\r\n",
|
||||
"registered": "2018-04-26T06:04:40 -02:00",
|
||||
"latitude": -64.196802,
|
||||
"longitude": -117.396238,
|
||||
"tags": [
|
||||
"wontfix"
|
||||
],
|
||||
"isActive": true
|
||||
}
|
||||
]);
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
let hits = response["hits"].as_array().unwrap();
|
||||
let hits: Vec<Value> = hits.iter().cloned().take(3).collect();
|
||||
assert_json_eq!(expected.clone(), serde_json::to_value(hits).unwrap(), ordered: false);
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_no_params() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json! ({});
|
||||
|
||||
// an empty search should return the first 20 indexed documents
|
||||
let dataset: Vec<Value> = serde_json::from_slice(include_bytes!("assets/test_set.json")).unwrap();
|
||||
let expected: Vec<Value> = dataset.into_iter().take(20).collect();
|
||||
let expected: Value = serde_json::to_value(expected).unwrap();
|
||||
|
||||
test_post_get_search!(server, query, |response, _status_code| {
|
||||
assert_json_eq!(expected.clone(), response["hits"].clone(), ordered: false);
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_in_unexisting_index() {
|
||||
let mut server = common::Server::with_uid("test");
|
||||
|
||||
let query = json! ({
|
||||
"q": "exercitation"
|
||||
});
|
||||
|
||||
let expected = json! ({
|
||||
"message": "Index test not found",
|
||||
"errorCode": "index_not_found",
|
||||
"errorType": "invalid_request_error",
|
||||
"errorLink": "https://docs.meilisearch.com/errors#index_not_found"
|
||||
});
|
||||
|
||||
test_post_get_search!(server, query, |response, status_code| {
|
||||
assert_eq!(404, status_code);
|
||||
assert_json_eq!(expected.clone(), response.clone(), ordered: false);
|
||||
});
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_unexpected_params() {
|
||||
|
||||
let query = json! ({"lol": "unexpected"});
|
||||
|
||||
let expected = "unknown field `lol`, expected one of `q`, `offset`, `limit`, `attributesToRetrieve`, `attributesToCrop`, `cropLength`, `attributesToHighlight`, `filters`, `matches`, `facetFilters`, `facetsDistribution` at line 1 column 6";
|
||||
|
||||
let post_query = serde_json::from_str::<meilisearch_http::routes::search::SearchQueryPost>(&query.to_string());
|
||||
assert!(post_query.is_err());
|
||||
assert_eq!(expected, post_query.err().unwrap().to_string());
|
||||
|
||||
let get_query: Result<meilisearch_http::routes::search::SearchQuery, _> = serde_json::from_str(&query.to_string());
|
||||
assert!(get_query.is_err());
|
||||
assert_eq!(expected, get_query.err().unwrap().to_string());
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn search_with_limit() {
|
||||
let mut server = common::Server::test_server().await;
|
||||
|
||||
let query = json! ({
|
||||
"q": "exercitation",
|
||||
"limit": 3
|
||||
});
|
||||
|
||||
let expected = json!([
|
||||
{
|
||||
"id": 1,
|
||||
"balance": "$1,706.13",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 27,
|
||||
"color": "Green",
|
||||
"name": "Cherry Orr",
|
||||
"gender": "female",
|
||||
"email": "cherryorr@chorizon.com",
|
||||
"phone": "+1 (995) 479-3174",
|
||||
"address": "442 Beverly Road, Ventress, New Mexico, 3361",
|
||||
"about": "Exercitation officia mollit proident nostrud ea. Pariatur voluptate labore nostrud magna duis non elit et incididunt Lorem velit duis amet commodo. Irure in velit laboris pariatur. Do tempor ex deserunt duis minim amet.\r\n",
|
||||
"registered": "2020-03-18T11:12:21 -01:00",
|
||||
"latitude": -24.356932,
|
||||
"longitude": 27.184808,
|
||||
"tags": [
|
||||
"new issue",
|
||||
"bug"
|
||||
],
|
||||
"isActive": true
|
||||
},
|
||||
{
|
||||
"id": 59,
|
||||
"balance": "$1,921.58",
|
||||
"picture": "http://placehold.it/32x32",
|
||||
"age": 31,
|
||||
"color": "Green",
|
||||
"name": "Harper Carson",
|
||||
"gender": "male",
|
||||
"email": "harpercarson@chorizon.com",
|
||||
@@ -101,7 +234,7 @@ async fn search_with_offset() {
"balance": "$1,921.58",
"picture": "http://placehold.it/32x32",
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -142,7 +275,7 @@ async fn search_with_offset() {
"balance": "$2,668.55",
"picture": "http://placehold.it/32x32",
"age": 36,
"color": "green",
"color": "Green",
"name": "Lucas Hess",
"gender": "male",
"email": "lucashess@chorizon.com",
@@ -181,7 +314,7 @@ async fn search_with_attribute_to_highlight_wildcard() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -201,7 +334,7 @@ async fn search_with_attribute_to_highlight_wildcard() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "<em>Cherry</em> Orr",
"gender": "female",
"email": "<em>cherry</em>orr@chorizon.com",
@@ -241,7 +374,7 @@ async fn search_with_attribute_to_highlight_1() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -261,7 +394,7 @@ async fn search_with_attribute_to_highlight_1() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "<em>Cherry</em> Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -301,7 +434,7 @@ async fn search_with_matches() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -355,7 +488,7 @@ async fn search_with_crop() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -375,7 +508,7 @@ async fn search_with_crop() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -413,7 +546,7 @@ async fn search_with_attributes_to_retrieve() {
{
"name": "Cherry Orr",
"age": 27,
"color": "green",
"color": "Green",
"gender": "female"
}
]);
@@ -440,7 +573,7 @@ async fn search_with_attributes_to_retrieve_wildcard() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -478,7 +611,7 @@ async fn search_with_filter() {
"balance": "$1,921.58",
"picture": "http://placehold.it/32x32",
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -499,7 +632,7 @@ async fn search_with_filter() {
"balance": "$2,668.55",
"picture": "http://placehold.it/32x32",
"age": 36,
"color": "green",
"color": "Green",
"name": "Lucas Hess",
"gender": "male",
"email": "lucashess@chorizon.com",
@@ -547,7 +680,7 @@ async fn search_with_filter() {
"balance": "$2,668.55",
"picture": "http://placehold.it/32x32",
"age": 36,
"color": "green",
"color": "Green",
"name": "Lucas Hess",
"gender": "male",
"email": "lucashess@chorizon.com",
@@ -601,7 +734,7 @@ async fn search_with_filter() {
"balance": "$1,913.42",
"picture": "http://placehold.it/32x32",
"age": 24,
"color": "green",
"color": "Green",
"name": "Emma Jacobs",
"gender": "female",
"email": "emmajacobs@chorizon.com",
@@ -705,7 +838,7 @@ async fn search_with_filter() {
"balance": "$1,921.58",
"picture": "http://placehold.it/32x32",
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -726,7 +859,7 @@ async fn search_with_filter() {
"balance": "$2,668.55",
"picture": "http://placehold.it/32x32",
"age": 36,
"color": "green",
"color": "Green",
"name": "Lucas Hess",
"gender": "male",
"email": "lucashess@chorizon.com",
@@ -779,7 +912,7 @@ async fn search_with_filter() {
"balance": "$1,351.43",
"picture": "http://placehold.it/32x32",
"age": 28,
"color": "green",
"color": "Green",
"name": "Evans Wagner",
"gender": "male",
"email": "evanswagner@chorizon.com",
@@ -823,7 +956,7 @@ async fn search_with_attributes_to_highlight_and_matches() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -843,7 +976,7 @@ async fn search_with_attributes_to_highlight_and_matches() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "<em>Cherry</em> Orr",
"gender": "female",
"email": "<em>cherry</em>orr@chorizon.com",
@@ -900,7 +1033,7 @@ async fn search_with_attributes_to_highlight_and_matches_and_crop() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -920,7 +1053,7 @@ async fn search_with_attributes_to_highlight_and_matches_and_crop() {
"balance": "$1,706.13",
"picture": "http://placehold.it/32x32",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -1223,7 +1356,7 @@ async fn test_faceted_search_valid() {
.as_array()
.unwrap()
.iter()
.all(|value| value.get("color").unwrap() == "green"));
.all(|value| value.get("color").unwrap() == "Green"));
});

let query = json!({
@@ -1318,7 +1451,7 @@ async fn test_faceted_search_valid() {
.unwrap() == "blue"
|| value
.get("color")
.unwrap() == "green"));
.unwrap() == "Green"));
});
// test and-or: ["tags:bug", ["color:blue", "color:green"]]
let query = json!({
@@ -1345,7 +1478,7 @@ async fn test_faceted_search_valid() {
.unwrap() == "blue"
|| value
.get("color")
.unwrap() == "green")));
.unwrap() == "Green")));

});
}
@@ -1469,6 +1602,14 @@ async fn test_facet_count() {
println!("{}", response);
assert!(response.get("exhaustiveFacetsCount").is_some());
assert_eq!(response.get("facetsDistribution").unwrap().as_object().unwrap().values().count(), 1);
// assert that case is preserved
assert!(response["facetsDistribution"]
.as_object()
.unwrap()["color"]
.as_object()
.unwrap()
.get("Green")
.is_some());
});
// searching on color and tags
let query = json!({
@@ -1688,3 +1829,51 @@ async fn update_documents_with_facet_distribution() {
let (response2, _) = server.search_post(search).await;
assert_json_eq!(expected_facet_distribution, response2["facetsDistribution"].clone());
}

#[actix_rt::test]
async fn test_filter_nb_hits_search_normal() {
let mut server = common::Server::with_uid("test");

let body = json!({
"uid": "test",
"primaryKey": "id",
});

server.create_index(body).await;
let documents = json!([
{
"id": 1,
"content": "a",
"color": "green",
"size": 1,
},
{
"id": 2,
"content": "a",
"color": "green",
"size": 2,
},
{
"id": 3,
"content": "a",
"color": "blue",
"size": 3,
},
]);

server.add_or_update_multiple_documents(documents).await;
let (response, _) = server.search_post(json!({"q": "a"})).await;
assert_eq!(response["nbHits"], 3);

let (response, _) = server.search_post(json!({"q": "a", "filters": "size = 1"})).await;
assert_eq!(response["nbHits"], 1);

server.update_distinct_attribute(json!("color")).await;

let (response, _) = server.search_post(json!({"q": "a"})).await;
assert_eq!(response["nbHits"], 2);

let (response, _) = server.search_post(json!({"q": "a", "filters": "size < 3"})).await;
println!("result: {}", response);
assert_eq!(response["nbHits"], 1);
}
@@ -130,7 +130,7 @@ async fn search_with_settings_stop_words() {
{
"balance": "$1,921.58",
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -140,7 +140,7 @@ async fn search_with_settings_stop_words() {
{
"balance": "$1,706.13",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -213,7 +213,7 @@ async fn search_with_settings_synonyms() {
{
"balance": "$1,921.58",
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -223,7 +223,7 @@ async fn search_with_settings_synonyms() {
{
"balance": "$1,706.13",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -292,7 +292,7 @@ async fn search_with_settings_ranking_rules() {
{
"balance": "$1,921.58",
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -302,7 +302,7 @@ async fn search_with_settings_ranking_rules() {
{
"balance": "$1,706.13",
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -438,7 +438,7 @@ async fn search_with_settings_displayed_attributes() {
let expect = json!([
{
"age": 31,
"color": "green",
"color": "Green",
"name": "Harper Carson",
"gender": "male",
"email": "harpercarson@chorizon.com",
@@ -446,7 +446,7 @@ async fn search_with_settings_displayed_attributes() {
},
{
"age": 27,
"color": "green",
"color": "Green",
"name": "Cherry Orr",
"gender": "female",
"email": "cherryorr@chorizon.com",
@@ -468,3 +468,56 @@ async fn settings_that_contains_wildcard_is_wildcard() {
assert_eq!(response["searchableAttributes"].as_array().unwrap()[0], "*");
assert_eq!(response["displayedAttributes"].as_array().unwrap()[0], "*");
}

#[actix_rt::test]
async fn test_displayed_attributes_field() {
let mut server = common::Server::test_server().await;

let body = json!({
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"desc(registered)",
"desc(age)",
],
"distinctAttribute": "id",
"searchableAttributes": [
"id",
"name",
"color",
"gender",
"email",
"phone",
"address",
"registered",
"about"
],
"displayedAttributes": [
"age",
"email",
"gender",
"name",
"registered",
],
"stopWords": [
"ad",
"in",
"ut",
],
"synonyms": {
"road": ["avenue", "street"],
"street": ["avenue"],
},
"attributesForFaceting": ["name"],
});

server.update_all_settings(body.clone()).await;

let (response, _status_code) = server.get_all_settings().await;

assert_json_eq!(body, response, ordered: true);
}
@@ -1,13 +1,13 @@
[package]
name = "meilisearch-schema"
version = "0.13.0"
version = "0.17.0"
license = "MIT"
authors = ["Kerollmops <renault.cle@gmail.com>"]
edition = "2018"

[dependencies]
indexmap = { version = "1.3.2", features = ["serde-1"] }
meilisearch-error = { path = "../meilisearch-error", version = "0.13.0" }
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.50", features = ["preserve_order"] }
meilisearch-error = { path = "../meilisearch-error", version = "0.17.0" }
serde = { version = "1.0.117", features = ["derive"] }
serde_json = { version = "1.0.59", features = ["preserve_order"] }
zerocopy = "0.3.0"
@@ -16,7 +16,7 @@ impl fmt::Display for Error {
use self::Error::*;
match self {
FieldNameNotFound(field) => write!(f, "The field {:?} doesn't exist", field),
PrimaryKeyAlreadyPresent => write!(f, "The schema already have an primary key. It's impossible to update it"),
PrimaryKeyAlreadyPresent => write!(f, "A primary key is already present. It's impossible to update it"),
MaxFieldsLimitExceeded => write!(f, "The maximum of possible reattributed field id has been reached"),
}
}
@@ -25,10 +25,7 @@ impl<T> OptionAll<T> {
}

pub fn is_all(&self) -> bool {
match self {
OptionAll::All => true,
_ => false,
}
matches!(self, OptionAll::All)
}
}
@@ -1,6 +1,6 @@
[package]
name = "meilisearch-tokenizer"
version = "0.13.0"
version = "0.17.0"
license = "MIT"
authors = ["Kerollmops <renault.cle@gmail.com>"]
edition = "2018"
@@ -1,6 +1,6 @@
[package]
name = "meilisearch-types"
version = "0.13.0"
version = "0.17.0"
license = "MIT"
authors = ["Clément Renault <renault.cle@gmail.com>"]
edition = "2018"
@@ -10,7 +10,7 @@ version = "0.3.0"
optional = true

[dependencies.serde]
version = "1.0.105"
version = "1.0.117"
features = ["derive"]
optional = true