Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-12-11 07:05:43 +00:00

Compare commits: with_rate_ ... stable-arc (95 commits)
| Author | SHA1 | Date | |
|---|---|---|---|
| | b7d9551870 | | |
| | 0aa3f667d4 | | |
| | 1eba5d45ea | | |
| | cfa78418f2 | | |
| | aaf5abbf1c | | |
| | f724f8adfe | | |
| | cde2a96486 | | |
| | ceca386dc0 | | |
| | 370a45a58b | | |
| | 7093bae131 | | |
| | fb785dc5ac | | |
| | 3a0b1a0c0e | | |
| | af808462b6 | | |
| | 2999ae3da4 | | |
| | 23ec7db3f9 | | |
| | f02e5cfaa6 | | |
| | 8443554b1f | | |
| | 84c782ce9a | | |
| | 1392a3b304 | | |
| | 2ec699a2e7 | | |
| | d02837f982 | | |
| | 6784d17d0e | | |
| | 415977a41e | | |
| | a07e1f7a00 | | |
| | b0460abf54 | | |
| | 48e00cdf3f | | |
| | aaea5f87db | | |
| | 44440bb8f4 | | |
| | ec74fd6b44 | | |
| | d3bc0c6e93 | | |
| | 262c8bf68b | | |
| | c4a669d056 | | |
| | dfaf845382 | | |
| | 1fe9fccf6e | | |
| | 9fe32e1e3b | | |
| | 388305fcb6 | | |
| | 49bc45e0d4 | | |
| | b478b18218 | | |
| | 877d1735b1 | | |
| | a1a29e92fd | | |
| | fea969d5e5 | | |
| | fcca7475fa | | |
| | 3fc1d7e67b | | |
| | f1884d6910 | | |
| | 0e6394fafc | | |
| | 637ca7b9fa | | |
| | 25e39edc7e | | |
| | f908ae2ef4 | | |
| | 8ddec58430 | | |
| | b4d0403518 | | |
| | 3525c964a7 | | |
| | ed51df41e5 | | |
| | 7f89e302a2 | | |
| | c603e17b1d | | |
| | 07b28ea8cf | | |
| | 10ab5f6a58 | | |
| | 684b90066d | | |
| | 93ab019304 | | |
| | 1ef517f63d | | |
| | 1a1ede96de | | |
| | 93afeedcea | | |
| | 25d057b75e | | |
| | b44c381c2a | | |
| | 51be75a264 | | |
| | 4953b62712 | | |
| | 9473cccc27 | | |
| | 9327db3e91 | | |
| | 0fced6f270 | | |
| | 1387a211d2 | | |
| | 661b345ad9 | | |
| | 0f0d1dccf0 | | |
| | 0331fc7c71 | | |
| | b4434dcad2 | | |
| | d08d97bf43 | | |
| | 5cfcdbb55a | | |
| | c77c3a90a0 | | |
| | a8991ccb64 | | |
| | 761bd3aca4 | | |
| | 26ab6ab0cc | | |
| | 379522ace3 | | |
| | 1d5f17a9ea | | |
| | 8bb260bf3e | | |
| | 52b38bee9d | | |
| | f5454dfa60 | | |
| | 1e464e87fc | | |
| | 6126fc8d98 | | |
| | 2fdd814e57 | | |
| | 20fa103992 | | |
| | d5638d2c27 | | |
| | 932414bf72 | | |
| | b20025c01e | | |
| | 3999f74f78 | | |
| | 739b9f5505 | | |
| | 722a0da0c3 | | |
| | 5704a1895d | | |
.github/scripts/is-latest-release.sh (vendored), 134 lines changed

```diff
@@ -1,41 +1,127 @@
 #!/bin/sh

-# Used in our CIs to publish the latest Docker image.
+# Was used in our CIs to publish the latest docker image. Not used anymore, will be used again when v1 and v2 will be out and we will want to maintain multiple stable versions.
+# Returns "true" or "false" (as a string) to be used in the `if` in GHA

-# Checks if the current tag ($GITHUB_REF) corresponds to the latest release tag on GitHub
+# Checks if the current tag should be the latest (in terms of semver and not of release date).
-# Returns "true" or "false" (as a string).
+# Ex: previous tag -> v2.1.1
+# new tag -> v1.20.3
+# The new tag (v1.20.3) should NOT be the latest
+# So it returns "false", the `latest` tag should not be updated for the release v1.20.3 and still need to correspond to v2.1.1

-GITHUB_API='https://api.github.com/repos/meilisearch/meilisearch/releases'
+# GLOBAL
-PNAME='meilisearch'
+GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$' # i.e. v[number].[number].[number]

 # FUNCTIONS

-# Returns the version of the latest stable version of Meilisearch by setting the $latest variable.
+# semverParseInto and semverLT from https://github.com/cloudflare/semver_bash/blob/master/semver.sh

+# usage: semverParseInto version major minor patch special
+# version: the string version
+# major, minor, patch, special: will be assigned by the function
+semverParseInto() {
+    local RE='[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)'
+    #MAJOR
+    eval $2=`echo $1 | sed -e "s#$RE#\1#"`
+    #MINOR
+    eval $3=`echo $1 | sed -e "s#$RE#\2#"`
+    #MINOR
+    eval $4=`echo $1 | sed -e "s#$RE#\3#"`
+    #SPECIAL
+    eval $5=`echo $1 | sed -e "s#$RE#\4#"`
+}

+# usage: semverLT version1 version2
+semverLT() {
+    local MAJOR_A=0
+    local MINOR_A=0
+    local PATCH_A=0
+    local SPECIAL_A=0

+    local MAJOR_B=0
+    local MINOR_B=0
+    local PATCH_B=0
+    local SPECIAL_B=0

+    semverParseInto $1 MAJOR_A MINOR_A PATCH_A SPECIAL_A
+    semverParseInto $2 MAJOR_B MINOR_B PATCH_B SPECIAL_B

+    if [ $MAJOR_A -lt $MAJOR_B ]; then
+        return 0
+    fi
+    if [ $MAJOR_A -le $MAJOR_B ] && [ $MINOR_A -lt $MINOR_B ]; then
+        return 0
+    fi
+    if [ $MAJOR_A -le $MAJOR_B ] && [ $MINOR_A -le $MINOR_B ] && [ $PATCH_A -lt $PATCH_B ]; then
+        return 0
+    fi
+    if [ "_$SPECIAL_A" == "_" ] && [ "_$SPECIAL_B" == "_" ] ; then
+        return 1
+    fi
+    if [ "_$SPECIAL_A" == "_" ] && [ "_$SPECIAL_B" != "_" ] ; then
+        return 1
+    fi
+    if [ "_$SPECIAL_A" != "_" ] && [ "_$SPECIAL_B" == "_" ] ; then
+        return 0
+    fi
+    if [ "_$SPECIAL_A" < "_$SPECIAL_B" ]; then
+        return 0
+    fi

+    return 1
+}

+# Returns the tag of the latest stable release (in terms of semver and not of release date)
 get_latest() {
-    # temp_file is needed because the grep would start before the download is over
+    temp_file='temp_file' # temp_file needed because the grep would start before the download is over
-    temp_file=$(mktemp -q /tmp/$PNAME.XXXXXXXXX)
+    curl -s 'https://api.github.com/repos/meilisearch/meilisearch/releases' > "$temp_file"
-    latest_release="$GITHUB_API/latest"
+    releases=$(cat "$temp_file" | \
+        grep -E "tag_name|draft|prerelease" \
+        | tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
+    # Returns a list of [tag_name draft_boolean prerelease_boolean ...]
+    # Ex: v0.10.1 false false v0.9.1-rc.1 false true v0.9.0 false false...

-    if [ $? -ne 0 ]; then
+    i=0
-        echo "$0: Can't create temp file."
+    latest=""
-        exit 1
+    current_tag=""
-    fi
+    for release_info in $releases; do
+        if [ $i -eq 0 ]; then # Checking tag_name
-    if [ -z "$GITHUB_PAT" ]; then
+            if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release
-        curl -s "$latest_release" > "$temp_file" || return 1
+                current_tag=$release_info
             else
-        curl -H "Authorization: token $GITHUB_PAT" -s "$latest_release" > "$temp_file" || return 1
+                current_tag=""
             fi
+            i=1
-    latest="$(cat "$temp_file" | grep '"tag_name":' | cut -d ':' -f2 | tr -d '"' | tr -d ',' | tr -d ' ')"
+        elif [ $i -eq 1 ]; then # Checking draft boolean
+            if [ "$release_info" = "true" ]; then
+                current_tag=""
+            fi
+            i=2
+        elif [ $i -eq 2 ]; then # Checking prerelease boolean
+            if [ "$release_info" = "true" ]; then
+                current_tag=""
+            fi
+            i=0
+            if [ "$current_tag" != "" ]; then # If the current_tag is valid
+                if [ "$latest" = "" ]; then # If there is no latest yet
+                    latest="$current_tag"
+                else
+                    semverLT $current_tag $latest # Comparing latest and the current tag
+                    if [ $? -eq 1 ]; then
+                        latest="$current_tag"
+                    fi
+                fi
+            fi
+        fi
+    done

     rm -f "$temp_file"
-    return 0
+    echo $latest
 }

 # MAIN
 current_tag="$(echo $GITHUB_REF | tr -d 'refs/tags/')"
-get_latest
+latest="$(get_latest)"

 if [ "$current_tag" != "$latest" ]; then
     # The current release tag is not the latest
@@ -44,5 +130,3 @@ else
     # The current release tag is the latest
     echo "true"
 fi
-
-exit 0
```
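On the `+` side, the script decides which tag is "latest" by parsing semver in plain shell, instead of asking the GitHub `releases/latest` endpoint as the `-` side does. A minimal usage sketch of those helpers (an assumption for illustration, reusing the tag values from the script's own comments, not part of the diff):

```sh
# Requires semverParseInto/semverLT exactly as defined on the + side above.
previous_latest="v2.1.1"
new_tag="v1.20.3"

# semverLT returns 0 (success) when its first argument is semver-lower than its second.
if semverLT "$new_tag" "$previous_latest"; then
    echo "false"   # keep `latest` pointing at v2.1.1
else
    echo "true"    # the new tag becomes the latest
fi
```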
.github/workflows/coverage.yml (vendored, new file), 33 lines

```diff
@@ -0,0 +1,33 @@
+---
+on:
+  workflow_dispatch:
+
+name: Execute code coverage
+
+jobs:
+  nightly-coverage:
+    runs-on: ubuntu-18.04
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly
+          override: true
+      - uses: actions-rs/cargo@v1
+        with:
+          command: clean
+      - uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --all-features --no-fail-fast
+        env:
+          CARGO_INCREMENTAL: "0"
+          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
+      - uses: actions-rs/grcov@v0.1
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v3
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          file: ${{ steps.coverage.outputs.report }}
+          yml: ./codecov.yml
+          fail_ci_if_error: true
```
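The job above is a nightly test run with profiling flags, followed by grcov and a Codecov upload. A rough local equivalent of its build/test phase (a sketch under that assumption, not commands taken from the workflow):

```sh
# Mirror the job's environment, then run the instrumented test suite on nightly.
export CARGO_INCREMENTAL="0"
export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
cargo +nightly clean
cargo +nightly test --all-features --no-fail-fast
# The workflow then runs actions-rs/grcov@v0.1 and uploads the report with codecov/codecov-action@v3.
```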
```diff
@@ -15,7 +15,7 @@ jobs:
           github_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           title: Upgrade dependencies
           body: |
-            We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the engine-team repositories that Meilisearch depends on (milli, charabia, heed...).
+            We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the core-team repositories that Meilisearch depends on (milli, charabia, heed...).

             ⚠️ This issue should only be done at the beginning of the sprint!
           labels: |
```
.github/workflows/flaky.yml (vendored), 14 lines changed

```diff
@@ -6,20 +6,10 @@ on:

 jobs:
   flaky:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
-    container:
-      # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
-      image: ubuntu:18.04
     steps:
       - uses: actions/checkout@v3
-      - name: Install needed dependencies
-        run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
       - name: Install cargo-flaky
         run: cargo install cargo-flaky
       - name: Run cargo flaky 100 times
```
.github/workflows/latest-git-tag.yml (vendored), 28 lines changed

```diff
@@ -1,28 +0,0 @@
-# Create or update a latest git tag when releasing a stable version of Meilisearch
-name: Update latest git tag
-on:
-  workflow_dispatch:
-  release:
-    types: [published]
-
-jobs:
-  check-version:
-    name: Check the version validity
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Check release validity
-        if: github.event_name == 'release'
-        run: bash .github/scripts/check-release.sh
-
-  update-latest-tag:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - uses: rickstaa/action-create-tag@v1
-        with:
-          tag: "latest"
-          message: "Latest stable release of Meilisearch"
-          # Move the tag if `latest` already exists
-          force_push_tag: true
-          github_token: ${{ secrets.MEILI_BOT_GH_PAT }}
```
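The removed workflow moves a `latest` tag with rickstaa/action-create-tag and `force_push_tag: true`. A hedged sketch of the equivalent manual git commands (an assumption; the workflow itself only calls the action):

```sh
# Force-move the `latest` tag to the current commit and push it.
git tag -f -a latest -m "Latest stable release of Meilisearch"
git push --force origin latest
```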
.github/workflows/milestone-workflow.yml (vendored), 14 lines changed

```diff
@@ -3,8 +3,8 @@ name: Milestone's workflow
 # /!\ No git flow are handled here

 # For each Milestone created (not opened!), and if the release is NOT a patch release (only the patch changed)
-# - the roadmap issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/roadmap-issue.md
+# - the roadmap issue is created, see https://github.com/meilisearch/core-team/blob/main/issue-templates/roadmap-issue.md
-# - the changelog issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/changelog-issue.md
+# - the changelog issue is created, see https://github.com/meilisearch/core-team/blob/main/issue-templates/changelog-issue.md

 # For each Milestone closed
 # - the `release_version` label is created
@@ -31,6 +31,8 @@ jobs:
     runs-on: ubuntu-latest
     outputs:
       is-patch: ${{ steps.check-patch.outputs.is-patch }}
+    env:
+      MILESTONE_VERSION: ${{ github.event.milestone.title }}
     steps:
       - uses: actions/checkout@v3
       - name: Check if this release is a patch release only
@@ -39,10 +41,10 @@ jobs:
           echo version: $MILESTONE_VERSION
           if [[ $MILESTONE_VERSION =~ ^v[0-9]+\.[0-9]+\.0$ ]]; then
             echo 'This is NOT a patch release'
-            echo "is-patch=false" >> $GITHUB_OUTPUT
+            echo ::set-output name=is-patch::false
           elif [[ $MILESTONE_VERSION =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
             echo 'This is a patch release'
-            echo "is-patch=true" >> $GITHUB_OUTPUT
+            echo ::set-output name=is-patch::true
           else
             echo "Not a valid format of release, check the Milestone's title."
             echo 'Should be vX.Y.Z'
@@ -59,7 +61,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Download the issue template
-        run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/roadmap-issue.md > $ISSUE_TEMPLATE
+        run: curl -s https://raw.githubusercontent.com/meilisearch/core-team/main/issue-templates/roadmap-issue.md > $ISSUE_TEMPLATE
       - name: Replace all empty occurrences in the templates
         run: |
           # Replace all <<version>> occurrences
@@ -92,7 +94,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Download the issue template
-        run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/changelog-issue.md > $ISSUE_TEMPLATE
+        run: curl -s https://raw.githubusercontent.com/meilisearch/core-team/main/issue-templates/changelog-issue.md > $ISSUE_TEMPLATE
       - name: Replace all empty occurrences in the templates
         run: |
           # Replace all <<version>> occurrences
```
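The only change inside the `run:` script is the step-output syntax; both forms are shown in isolation below for comparison (a sketch, not part of the diff):

```sh
# `-` side: append to the GITHUB_OUTPUT environment file (current GitHub Actions mechanism).
echo "is-patch=true" >> "$GITHUB_OUTPUT"

# `+` side: the older `set-output` workflow command, since deprecated by GitHub.
echo "::set-output name=is-patch::true"
```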
.github/workflows/publish-binaries.yml (vendored), 71 lines changed

```diff
@@ -24,70 +24,44 @@ jobs:
           escaped_tag=$(printf "%q" ${{ github.ref_name }})

           if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-            echo "stable=true" >> $GITHUB_OUTPUT
+            echo ::set-output name=stable::true
           else
-            echo "stable=false" >> $GITHUB_OUTPUT
+            echo ::set-output name=stable::false
           fi
       - name: Check release validity
         if: github.event_name == 'release' && steps.check-tag-format.outputs.stable == 'true'
         run: bash .github/scripts/check-release.sh

-  publish-linux:
+  publish:
-    name: Publish binary for Linux
-    runs-on: ubuntu-latest
-    needs: check-version
-    container:
-      # Use ubuntu-18.04 to compile with glibc 2.27
-      image: ubuntu:18.04
-    steps:
-      - uses: actions/checkout@v3
-      - name: Install needed dependencies
-        run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
-      - name: Build
-        run: cargo build --release --locked
-      # No need to upload binaries for dry run (cron)
-      - name: Upload binaries to release
-        if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.3.0
-        with:
-          repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
-          file: target/release/meilisearch
-          asset_name: meilisearch-linux-amd64
-          tag: ${{ github.ref }}

-  publish-macos-windows:
     name: Publish binary for ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     needs: check-version
     strategy:
       fail-fast: false
       matrix:
-        os: [macos-12, windows-2022]
+        os: [ubuntu-18.04, macos-latest, windows-latest]
         include:
-          - os: macos-12
+          - os: ubuntu-18.04
+            artifact_name: meilisearch
+            asset_name: meilisearch-linux-amd64
+          - os: macos-latest
             artifact_name: meilisearch
             asset_name: meilisearch-macos-amd64
-          - os: windows-2022
+          - os: windows-latest
             artifact_name: meilisearch.exe
             asset_name: meilisearch-windows-amd64.exe

     steps:
-      - uses: actions/checkout@v3
+      - uses: hecrj/setup-rust-action@master
-      - uses: actions-rs/toolchain@v1
         with:
-          toolchain: stable
+          rust-version: stable
-          override: true
+      - uses: actions/checkout@v3
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.3.0
+        uses: svenstaro/upload-release-action@v1-release
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -98,13 +72,15 @@ jobs:
     name: Publish binary for macOS silicon
     runs-on: ${{ matrix.os }}
     needs: check-version
+    continue-on-error: false
     strategy:
       fail-fast: false
       matrix:
         include:
-          - os: macos-12
+          - os: macos-latest
             target: aarch64-apple-darwin
             asset_name: meilisearch-macos-apple-silicon

     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
@@ -123,7 +99,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.3.0
+        uses: svenstaro/upload-release-action@v1-release
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -134,6 +110,7 @@ jobs:
     name: Publish binary for aarch64
     runs-on: ${{ matrix.os }}
     needs: check-version
+    continue-on-error: false
     strategy:
       fail-fast: false
       matrix:
@@ -144,9 +121,11 @@ jobs:
             linker: gcc-aarch64-linux-gnu
             use-cross: true
             asset_name: meilisearch-linux-aarch64

     steps:
       - name: Checkout repository
         uses: actions/checkout@v3

       - name: Installing Rust toolchain
         uses: actions-rs/toolchain@v1
         with:
@@ -154,13 +133,16 @@ jobs:
           profile: minimal
           target: ${{ matrix.target }}
           override: true

       - name: APT update
         run: |
           sudo apt update

       - name: Install target specific tools
         if: matrix.use-cross
         run: |
           sudo apt-get install -y ${{ matrix.linker }}

       - name: Configure target aarch64 GNU
         if: matrix.target == 'aarch64-unknown-linux-gnu'
         ## Environment variable is not passed using env:
@@ -172,18 +154,21 @@ jobs:
           echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
           echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config
           echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV

       - name: Cargo build
         uses: actions-rs/cargo@v1
         with:
           command: build
           use-cross: ${{ matrix.use-cross }}
           args: --release --target ${{ matrix.target }}

       - name: List target output files
         run: ls -lR ./target

       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.3.0
+        uses: svenstaro/upload-release-action@v1-release
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
```
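The aarch64 job is the same on both sides apart from `continue-on-error` and the upload-action version. A rough local equivalent of its cross-compilation steps, pieced together from the hunk above (an assumption, not an official build recipe):

```sh
# Install the cross linker and point cargo at it.
sudo apt update
sudo apt-get install -y gcc-aarch64-linux-gnu
echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config
echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config

# jemalloc page-size assumption for aarch64 (2^16 = 64 KiB), set via $GITHUB_ENV in the job.
export JEMALLOC_SYS_WITH_LG_PAGE=16

cargo build --release --target aarch64-unknown-linux-gnu
ls -lR ./target
```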
.github/workflows/publish-deb-brew-pkg.yml (vendored), 26 lines changed

```diff
@@ -1,8 +1,8 @@
-name: Publish to APT repository & Homebrew
+name: Publish deb pkg to GitHub release & APT repository & Homebrew

 on:
   release:
-    types: [published]
+    types: [released]

 jobs:
   check-version:
@@ -15,27 +15,19 @@ jobs:

   debian:
     name: Publish debian packagge
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     needs: check-version
-    container:
-      # Use ubuntu-18.04 to compile with glibc 2.27
-      image: ubuntu:18.04
     steps:
-      - name: Install needed dependencies
+      - uses: hecrj/setup-rust-action@master
-        run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
         with:
-          toolchain: stable
+          rust-version: stable
-          override: true
       - name: Install cargo-deb
         run: cargo install cargo-deb
       - uses: actions/checkout@v3
       - name: Build deb package
-        run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
+        run: cargo deb -p meilisearch-http -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.3.0
+        uses: svenstaro/upload-release-action@v1-release
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
@@ -46,11 +38,11 @@ jobs:

   homebrew:
     name: Bump Homebrew formula
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     needs: check-version
     steps:
       - name: Create PR to Homebrew
-        uses: mislav/bump-homebrew-formula-action@v2
+        uses: mislav/bump-homebrew-formula-action@v1
         with:
           formula-name: meilisearch
         env:
```
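Besides runner and action versions, the substantive change in the debian job is the crate name passed to cargo-deb. The two invocations, shown standalone (assumes cargo-deb is installed and the command runs from the repository root):

```sh
cargo install cargo-deb

# `-` side of the compare (crate named `meilisearch`):
cargo deb -p meilisearch -o target/debian/meilisearch.deb

# `+` side of the compare (older crate name `meilisearch-http`):
cargo deb -p meilisearch-http -o target/debian/meilisearch.deb
```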
.github/workflows/publish-docker-images.yml (vendored), 63 lines changed

```diff
@@ -1,16 +1,10 @@
 ---
 on:
-  push:
-    # Will run for every tag pushed except `latest`
-    # When the `latest` git tag is created with this [CI](../latest-git-tag.yml)
-    # we don't need to create a Docker `latest` image again.
-    # The `latest` Docker image push is already done in this CI when releasing a stable version of Meilisearch.
-    tags-ignore:
-      - latest
-  # Both `schedule` and `workflow_dispatch` build the nightly tag
   schedule:
-    - cron: '0 23 * * *' # Every day at 11:00pm
+    - cron: '0 4 * * *' # Every day at 4:00am
-  workflow_dispatch:
+  push:
+    tags:
+      - '*'

 name: Publish tagged images to Docker Hub

@@ -20,43 +14,27 @@ jobs:
     steps:
       - uses: actions/checkout@v3

-      # If we are running a cron or manual job ('schedule' or 'workflow_dispatch' event), it means we are publishing the `nightly` tag, so not considered stable.
+      # Check if the tag has the v<nmumber>.<number>.<number> format. If yes, it means we are publishing an official release.
-      # If we have pushed a tag, and the tag has the v<nmumber>.<number>.<number> format, it means we are publishing an official release, so considered stable.
       # In this situation, we need to set `output.stable` to create/update the following tags (additionally to the `vX.Y.Z` Docker tag):
       # - a `vX.Y` (without patch version) Docker tag
       # - a `latest` Docker tag
-      # For any other tag pushed, this is not considered stable.
+      - name: Check tag format
-      - name: Define if stable and latest release
+        if: github.event_name != 'schedule'
         id: check-tag-format
-        env:
-          # To avoid request limit with the .github/scripts/is-latest-release.sh script
-          GITHUB_PATH: ${{ secrets.MEILI_BOT_GH_PAT }}
         run: |
           escaped_tag=$(printf "%q" ${{ github.ref_name }})
-          echo "latest=false" >> $GITHUB_OUTPUT

-          if [[ ${{ github.event_name }} != 'push' ]]; then
+          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-            echo "stable=false" >> $GITHUB_OUTPUT
+            echo ::set-output name=stable::true
-          elif [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-            echo "stable=true" >> $GITHUB_OUTPUT
-            echo "latest=$(sh .github/scripts/is-latest-release.sh)" >> $GITHUB_OUTPUT
           else
-            echo "stable=false" >> $GITHUB_OUTPUT
+            echo ::set-output name=stable::false
           fi

-      # Check only the validity of the tag for stable releases (not for pre-releases or other tags)
+      # Check only the validity of the tag for official releases (not for pre-releases or other tags)
       - name: Check release validity
-        if: steps.check-tag-format.outputs.stable == 'true'
+        if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable == 'true'
         run: bash .github/scripts/check-release.sh

-      - name: Set build-args for Docker buildx
-        id: build-metadata
-        run: |
-          # Extract commit date
-          commit_date=$(git show -s --format=%cd --date=iso-strict ${{ github.sha }})

-          echo "date=$commit_date" >> $GITHUB_OUTPUT

       - name: Set up QEMU
         uses: docker/setup-qemu-action@v2

@@ -64,6 +42,7 @@ jobs:
         uses: docker/setup-buildx-action@v2

       - name: Login to Docker Hub
+        if: github.event_name != 'schedule'
         uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -74,29 +53,25 @@ jobs:
         uses: docker/metadata-action@v4
         with:
           images: getmeili/meilisearch
-          # Prevent `latest` to be updated for each new tag pushed.
+          # The latest and `vX.Y` tags are only pushed for the official Meilisearch releases
-          # We need latest and `vX.Y` tags to only be pushed for the stable Meilisearch releases.
+          # See https://github.com/docker/metadata-action#latest-tag
           flavor: latest=false
           tags: |
             type=ref,event=tag
-            type=raw,value=nightly,enable=${{ github.event_name != 'push' }}
             type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
-            type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
+            type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' }}

       - name: Build and push
         uses: docker/build-push-action@v3
         with:
-          push: true
+          # We do not push tags for the cron jobs, this is only for test purposes
+          push: ${{ github.event_name != 'schedule' }}
           platforms: linux/amd64,linux/arm64
           tags: ${{ steps.meta.outputs.tags }}
-          build-args: |
-            COMMIT_SHA=${{ github.sha }}
-            COMMIT_DATE=${{ steps.build-metadata.outputs.date }}

       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
-        # Do not send if nightly build (i.e. 'schedule' or 'workflow_dispatch' event)
+        if: github.event_name != 'schedule'
-        if: github.event_name == 'push'
         uses: peter-evans/repository-dispatch@v2
         with:
           token: ${{ secrets.MEILI_BOT_GH_PAT }}
```
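On the `-` side, the `latest` Docker tag is only enabled when the pushed tag is both stable and reported as the newest semver by is-latest-release.sh. A condensed plain-bash sketch of that "Define if stable and latest release" step (an assumption for readability, using the default GITHUB_* environment variables rather than the `${{ }}` expressions, not the literal step):

```sh
escaped_tag=$(printf "%q" "$GITHUB_REF_NAME")
latest=false

if [[ "$GITHUB_EVENT_NAME" != 'push' ]]; then
    stable=false                                       # scheduled/manual runs publish `nightly`, never stable
elif [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    stable=true
    latest=$(sh .github/scripts/is-latest-release.sh)  # prints "true" or "false"
else
    stable=false
fi

echo "stable=$stable"
echo "latest=$latest"
```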
.github/workflows/rust.yml (vendored), 57 lines changed

```diff
@@ -15,46 +15,17 @@ env:
   RUSTFLAGS: "-D warnings"

 jobs:
-  test-linux:
+  tests:
-    name: Tests on ubuntu-18.04
-    runs-on: ubuntu-latest
-    container:
-      # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
-      image: ubuntu:18.04
-    steps:
-      - uses: actions/checkout@v3
-      - name: Install needed dependencies
-        run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
-      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.0
-      - name: Run cargo check without any default features
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --locked --release --no-default-features
-      - name: Run cargo test
-        uses: actions-rs/cargo@v1
-        with:
-          command: test
-          args: --locked --release

-  test-others:
     name: Tests on ${{ matrix.os }}
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
       matrix:
-        os: [macos-12, windows-2022]
+        os: [ubuntu-18.04, macos-latest, windows-latest]
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.0
+        uses: Swatinem/rust-cache@v2.0.0
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -69,22 +40,16 @@ jobs:
   # We run tests in debug also, to make sure that the debug_assertions are hit
   test-debug:
     name: Run tests in debug
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
-    container:
-      # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
-      image: ubuntu:18.04
     steps:
       - uses: actions/checkout@v3
-      - name: Install needed dependencies
-        run: |
-          apt-get update && apt-get install -y curl
-          apt-get install build-essential -y
       - uses: actions-rs/toolchain@v1
         with:
+          profile: minimal
           toolchain: stable
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.0
+        uses: Swatinem/rust-cache@v2.0.0
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -93,7 +58,7 @@ jobs:

   clippy:
     name: Run Clippy
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
@@ -103,7 +68,7 @@ jobs:
           override: true
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.0
+        uses: Swatinem/rust-cache@v2.0.0
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -112,16 +77,16 @@ jobs:

   fmt:
     name: Run Rustfmt
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly
+          toolchain: stable
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.0
+        uses: Swatinem/rust-cache@v2.0.0
       - name: Run cargo fmt
         run: cargo fmt --all -- --check
```
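Both sides run the same cargo commands, only on different runners and with different cache-action versions. Their local equivalents, with flags copied from the hunks above (a sketch; the exact clippy and debug-test arguments are assumptions since they are cut off by the hunk boundaries):

```sh
RUSTFLAGS="-D warnings" cargo build --locked --release --no-default-features
RUSTFLAGS="-D warnings" cargo test --locked --release
cargo test                      # debug build, so debug_assertions are exercised
cargo clippy
cargo fmt --all -- --check
```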
```diff
@@ -16,7 +16,7 @@ jobs:

   update-version-cargo-toml:
     name: Update version in Cargo.toml files
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v3
       - uses: actions-rs/toolchain@v1
@@ -45,4 +45,3 @@ jobs:
             --body '⚠️ This PR is automatically generated. Check the new version is the expected one before merging.' \
             --label 'skip changelog' \
             --milestone $NEW_VERSION
-            --base $GITHUB_REF_NAME
```
```diff
@@ -10,12 +10,24 @@ If Meilisearch does not offer optimized support for your language, please consid

 ## Table of Contents

+- [Hacktoberfest 2022](#hacktoberfest-2022)
 - [Assumptions](#assumptions)
 - [How to Contribute](#how-to-contribute)
 - [Development Workflow](#development-workflow)
 - [Git Guidelines](#git-guidelines)
 - [Release Process (for internal team only)](#release-process-for-internal-team-only)

+## Hacktoberfest 2022

+It's [Hacktoberfest month](https://hacktoberfest.com)! 🥳

+Thanks so much for participating with Meilisearch this year!
+1. We will follow the quality standards set by the organizers of Hacktoberfest (see detail on their [website](https://hacktoberfest.com/participation/#spam)). Our reviewers will not consider any PR that doesn’t match that standard.
+2. PRs reviews will take place from Monday to Thursday, during usual working hours, CEST time. If you submit outside of these hours, there’s no need to panic; we will get around to your contribution.
+3. There will be no issue assignment as we don’t want people to ask to be assigned specific issues and never return, discouraging the volunteer contributors from opening a PR to fix this issue. We take the liberty to choose the PR that best fixes the issue, so we encourage you to get to it as soon as possible and do your best!

+You can check out the longer, more complete guideline documentation [here](https://github.com/meilisearch/.github/blob/main/Hacktoberfest_2022_contributors_guidelines.md).

 ## Assumptions

 1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
@@ -97,7 +109,7 @@ _[Read more about this](https://github.com/meilisearch/integration-guides/blob/m

 ### How to Publish a new Release

-The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/engine-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
+The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.

 ### Release assets

```
194
Cargo.lock
generated
194
Cargo.lock
generated
@@ -34,18 +34,6 @@ dependencies = [
|
|||||||
"smallvec",
|
"smallvec",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "actix-governor"
|
|
||||||
version = "0.3.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "6fbf4afa1e2f7c28040febe2a7199ad0a5fed564dd645da06ab12642c7d22483"
|
|
||||||
dependencies = [
|
|
||||||
"actix-http",
|
|
||||||
"actix-web",
|
|
||||||
"futures",
|
|
||||||
"governor",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "actix-http"
|
name = "actix-http"
|
||||||
version = "3.2.2"
|
version = "3.2.2"
|
||||||
@@ -629,9 +617,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cargo_toml"
|
name = "cargo_toml"
|
||||||
version = "0.13.0"
|
version = "0.12.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "aa0e3586af56b3bfa51fca452bd56e8dbbbd5d8d81cbf0b7e4e35b695b537eb8"
|
checksum = "6a621d5d6d6c8d086dbaf1fe659981da41a1b63c6bdbba30b4dbb592c6d3bd49"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
"toml",
|
"toml",
|
||||||
@@ -1029,19 +1017,6 @@ dependencies = [
|
|||||||
"syn 1.0.103",
|
"syn 1.0.103",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "dashmap"
|
|
||||||
version = "5.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"hashbrown 0.12.3",
|
|
||||||
"lock_api",
|
|
||||||
"once_cell",
|
|
||||||
"parking_lot_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "derive_builder"
|
name = "derive_builder"
|
||||||
version = "0.11.2"
|
version = "0.11.2"
|
||||||
@@ -1126,7 +1101,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "dump"
|
name = "dump"
|
||||||
version = "1.0.0"
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"big_s",
|
"big_s",
|
||||||
@@ -1335,7 +1310,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "file-store"
|
name = "file-store"
|
||||||
version = "1.0.0"
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"faux",
|
"faux",
|
||||||
"tempfile",
|
"tempfile",
|
||||||
@@ -1357,8 +1332,8 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "filter-parser"
|
name = "filter-parser"
|
||||||
version = "0.37.3"
|
version = "0.37.0"
|
||||||
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.3#2101e3c6d592f6ce6cc25b6e4585f3a8a6246457"
|
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"nom",
|
"nom",
|
||||||
"nom_locate",
|
"nom_locate",
|
||||||
@@ -1376,8 +1351,8 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "flatten-serde-json"
|
name = "flatten-serde-json"
|
||||||
version = "0.37.3"
|
version = "0.37.0"
|
||||||
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.3#2101e3c6d592f6ce6cc25b6e4585f3a8a6246457"
|
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde_json",
|
"serde_json",
|
||||||
]
|
]
|
||||||
@@ -1474,12 +1449,6 @@ version = "0.3.25"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
|
checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "futures-timer"
|
|
||||||
version = "3.0.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "futures-util"
|
name = "futures-util"
|
||||||
version = "0.3.25"
|
version = "0.3.25"
|
||||||
@@ -1531,7 +1500,7 @@ checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"libc",
|
"libc",
|
||||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
"wasi",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1571,23 +1540,6 @@ version = "0.3.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
|
checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "governor"
|
|
||||||
version = "0.4.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "19775995ee20209163239355bc3ad2f33f83da35d9ef72dea26e5af753552c87"
|
|
||||||
dependencies = [
|
|
||||||
"dashmap",
|
|
||||||
"futures",
|
|
||||||
"futures-timer",
|
|
||||||
"no-std-compat",
|
|
||||||
"nonzero_ext",
|
|
||||||
"parking_lot",
|
|
||||||
"quanta",
|
|
||||||
"rand",
|
|
||||||
"smallvec",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "grenad"
|
name = "grenad"
|
||||||
version = "0.4.4"
|
version = "0.4.4"
|
||||||
@@ -1673,7 +1625,7 @@ dependencies = [
|
|||||||
"libc",
|
"libc",
|
||||||
"lmdb-rkv-sys",
|
"lmdb-rkv-sys",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"page_size 0.4.2",
|
"page_size",
|
||||||
"synchronoise",
|
"synchronoise",
|
||||||
"url",
|
"url",
|
||||||
"zerocopy",
|
"zerocopy",
|
||||||
@@ -1815,7 +1767,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "index-scheduler"
|
name = "index-scheduler"
|
||||||
version = "1.0.0"
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"big_s",
|
"big_s",
|
||||||
@@ -1831,7 +1783,6 @@ dependencies = [
|
|||||||
"meili-snap",
|
"meili-snap",
|
||||||
"meilisearch-types",
|
"meilisearch-types",
|
||||||
"nelson",
|
"nelson",
|
||||||
"page_size 0.5.0",
|
|
||||||
"roaring",
|
"roaring",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
@@ -1946,8 +1897,8 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "json-depth-checker"
|
name = "json-depth-checker"
|
||||||
version = "0.37.3"
|
version = "0.37.0"
|
||||||
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.3#2101e3c6d592f6ce6cc25b6e4585f3a8a6246457"
|
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde_json",
|
"serde_json",
|
||||||
]
|
]
|
||||||
@@ -2204,7 +2155,7 @@ checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d"
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "lmdb-rkv-sys"
|
name = "lmdb-rkv-sys"
|
||||||
version = "0.15.1"
|
version = "0.15.1"
|
||||||
source = "git+https://github.com/meilisearch/lmdb-rs#0144fb2bac524cdc2897d7750681ed3fff2dc3ac"
|
source = "git+https://github.com/meilisearch/lmdb-rs#5592bf5a812905cf0c633404ef8f8f4057112c65"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cc",
|
"cc",
|
||||||
"libc",
|
"libc",
|
||||||
@@ -2280,15 +2231,6 @@ dependencies = [
|
|||||||
"crc",
|
"crc",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "mach"
|
|
||||||
version = "0.3.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "manifest-dir-macros"
|
name = "manifest-dir-macros"
|
||||||
version = "0.1.16"
|
version = "0.1.16"
|
||||||
@@ -2315,7 +2257,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "meili-snap"
|
name = "meili-snap"
|
||||||
version = "1.0.0"
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"insta",
|
"insta",
|
||||||
"md5",
|
"md5",
|
||||||
@@ -2323,11 +2265,27 @@ dependencies = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "meilisearch"
|
name = "meilisearch-auth"
|
||||||
version = "0.30.1"
|
version = "0.30.0"
|
||||||
|
dependencies = [
|
||||||
|
"enum-iterator",
|
||||||
|
"hmac",
|
||||||
|
"meilisearch-types",
|
||||||
|
"rand",
|
||||||
|
"roaring",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"sha2",
|
||||||
|
"thiserror",
|
||||||
|
"time",
|
||||||
|
"uuid 1.2.1",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "meilisearch-http"
|
||||||
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"actix-cors",
|
"actix-cors",
|
||||||
"actix-governor",
|
|
||||||
"actix-http",
|
"actix-http",
|
||||||
"actix-rt",
|
"actix-rt",
|
||||||
"actix-web",
|
"actix-web",
|
||||||
@@ -2406,39 +2364,19 @@ dependencies = [
|
|||||||
"zip",
|
"zip",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "meilisearch-auth"
|
|
||||||
version = "1.0.0"
|
|
||||||
dependencies = [
|
|
||||||
"base64",
|
|
||||||
"enum-iterator",
|
|
||||||
"hmac",
|
|
||||||
"meilisearch-types",
|
|
||||||
"rand",
|
|
||||||
"roaring",
|
|
||||||
"serde",
|
|
||||||
"serde_json",
|
|
||||||
"sha2",
|
|
||||||
"thiserror",
|
|
||||||
"time",
|
|
||||||
"uuid 1.2.1",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "meilisearch-types"
|
name = "meilisearch-types"
|
||||||
version = "1.0.0"
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"actix-web",
|
"actix-web",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"csv",
|
"csv",
|
||||||
"either",
|
"either",
|
||||||
"enum-iterator",
|
"enum-iterator",
|
||||||
"file-store",
|
|
||||||
"flate2",
|
"flate2",
|
||||||
"fst",
|
"fst",
|
||||||
"insta",
|
"insta",
|
||||||
"meili-snap",
|
"meili-snap",
|
||||||
"memmap2",
|
|
||||||
"milli",
|
"milli",
|
||||||
"proptest",
|
"proptest",
|
||||||
"proptest-derive",
|
"proptest-derive",
|
||||||
@@ -2446,7 +2384,6 @@ dependencies = [
|
|||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"tar",
|
"tar",
|
||||||
"tempfile",
|
|
||||||
"thiserror",
|
"thiserror",
|
||||||
"time",
|
"time",
|
||||||
"tokio",
|
"tokio",
|
||||||
@@ -2479,8 +2416,8 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "milli"
|
name = "milli"
|
||||||
version = "0.37.3"
|
version = "0.37.0"
|
||||||
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.3#2101e3c6d592f6ce6cc25b6e4585f3a8a6246457"
|
source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bimap",
|
"bimap",
|
||||||
"bincode",
|
"bincode",
|
||||||
@@ -2570,7 +2507,7 @@ checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
"log",
|
"log",
|
||||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
"wasi",
|
||||||
"windows-sys 0.42.0",
|
"windows-sys 0.42.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -2594,12 +2531,6 @@ name = "nelson"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
source = "git+https://github.com/meilisearch/nelson.git?rev=675f13885548fb415ead8fbb447e9e6d9314000a#675f13885548fb415ead8fbb447e9e6d9314000a"
|
source = "git+https://github.com/meilisearch/nelson.git?rev=675f13885548fb415ead8fbb447e9e6d9314000a#675f13885548fb415ead8fbb447e9e6d9314000a"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "no-std-compat"
|
|
||||||
version = "0.4.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nom"
|
name = "nom"
|
||||||
version = "7.1.1"
|
version = "7.1.1"
|
||||||
@@ -2621,12 +2552,6 @@ dependencies = [
|
|||||||
"nom",
|
"nom",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "nonzero_ext"
|
|
||||||
version = "0.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ntapi"
|
name = "ntapi"
|
||||||
version = "0.4.0"
|
version = "0.4.0"
|
||||||
@@ -2738,16 +2663,6 @@ dependencies = [
|
|||||||
"winapi",
|
"winapi",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "page_size"
|
|
||||||
version = "0.5.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "1b7663cbd190cfd818d08efa8497f6cd383076688c49a391ef7c0d03cd12b561"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
"winapi",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "parking_lot"
|
name = "parking_lot"
|
||||||
version = "0.12.1"
|
version = "0.12.1"
|
||||||
@@ -2832,7 +2747,7 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "permissive-json-pointer"
|
name = "permissive-json-pointer"
|
||||||
version = "1.0.0"
|
version = "0.30.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"big_s",
|
"big_s",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
@@ -3062,22 +2977,6 @@ version = "2.28.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94"
|
checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "quanta"
|
|
||||||
version = "0.9.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "20afe714292d5e879d8b12740aa223c6a88f118af41870e8b6196e39a02238a8"
|
|
||||||
dependencies = [
|
|
||||||
"crossbeam-utils",
|
|
||||||
"libc",
|
|
||||||
"mach",
|
|
||||||
"once_cell",
|
|
||||||
"raw-cpuid",
|
|
||||||
"wasi 0.10.2+wasi-snapshot-preview1",
|
|
||||||
"web-sys",
|
|
||||||
"winapi",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quick-error"
|
name = "quick-error"
|
||||||
version = "1.2.3"
|
version = "1.2.3"
|
||||||
@@ -3147,15 +3046,6 @@ dependencies = [
|
|||||||
"rand_core",
|
"rand_core",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "raw-cpuid"
|
|
||||||
version = "10.6.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a6823ea29436221176fe662da99998ad3b4db2c7f31e7b6f5fe43adccd6320bb"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rayon"
|
name = "rayon"
|
||||||
version = "1.5.3"
|
version = "1.5.3"
|
||||||
@@ -4178,12 +4068,6 @@ dependencies = [
|
|||||||
"try-lock",
|
"try-lock",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasi"
|
|
||||||
version = "0.10.2+wasi-snapshot-preview1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasi"
|
name = "wasi"
|
||||||
version = "0.11.0+wasi-snapshot-preview1"
|
version = "0.11.0+wasi-snapshot-preview1"
|
||||||
@@ -1,7 +1,7 @@
 [workspace]
 resolver = "2"
 members = [
-"meilisearch",
+"meilisearch-http",
 "meilisearch-types",
 "meilisearch-auth",
 "meili-snap",
@@ -7,7 +7,7 @@ WORKDIR /meilisearch

 ARG COMMIT_SHA
 ARG COMMIT_DATE
-ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE}
+ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE}
 ENV RUSTFLAGS="-C target-feature=-crt-static"

 COPY . .
README.md
@@ -9,7 +9,7 @@
 <a href="https://blog.meilisearch.com">Blog</a> |
 <a href="https://docs.meilisearch.com">Documentation</a> |
 <a href="https://docs.meilisearch.com/faq/">FAQ</a> |
-<a href="https://discord.meilisearch.com">Discord</a>
+<a href="https://slack.meilisearch.com">Slack</a>
 </h4>

 <p align="center">
@@ -34,6 +34,14 @@ Meilisearch helps you shape a delightful search experience in a snap, offering f

 🔥 [**Try it!**](https://where2watch.meilisearch.com/) 🔥

+## 🎃 Hacktoberfest
+
+It’s Hacktoberfest 2022 @Meilisearch
+
+[Hacktoberfest](https://hacktoberfest.com/) is a celebration of the open-source community. This year, and for the third time in a row, Meilisearch is participating in this fantastic event.
+
+You’d like to contribute? Don’t hesitate to check out our [contributing guidelines](./CONTRIBUTING.md).
+
 ## ✨ Features

 - **Search-as-you-type:** find search results in less than 50 milliseconds
@@ -61,7 +69,7 @@ You may also want to check out [Meilisearch 101](https://docs.meilisearch.com/le

 ## ☁️ Meilisearch cloud

-Let us manage your infrastructure so you can focus on integrating a great search experience. Try [Meilisearch Cloud](https://meilisearch.com/pricing) today.
+Join the closed beta for Meilisearch cloud by filling out [this form](https://meilisearch.typeform.com/to/VI2cI2rv).

 ## 🧰 SDKs & integration tools

@@ -97,7 +105,7 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle

 - For feature requests, please visit our [product repository](https://github.com/meilisearch/product/discussions)
 - Found a bug? Open an [issue](https://github.com/meilisearch/meilisearch/issues)!
-- Want to be part of our Discord community? [Join us!](https://discord.gg/meilisearch)
+- Want to be part of our Slack community? [Join us!](https://slack.meilisearch.com/)
 - For everything else, please check [this page listing some of the other places where you can find us](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html)

 Thank you for your support!
@@ -1,7 +1,7 @@
 status = [
 'Tests on ubuntu-18.04',
-'Tests on macos-12',
+'Tests on macos-latest',
-'Tests on windows-2022',
+'Tests on windows-latest',
 'Run Clippy',
 'Run Rustfmt',
 'Run tests in debug',
config.toml
@@ -56,7 +56,7 @@ disable_auto_batching = false
 ### DUMPS ###
 #############

-dump_dir = "dumps/"
+dumps_dir = "dumps/"
 # Sets the directory where Meilisearch will create dump files.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#dumps-destination

@@ -73,69 +73,6 @@ ignore_dump_if_db_exists = false
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-dump-if-db-exists

-
-#####################
-### RATE LIMITING ###
-#####################
-
-rate_limiting_disable_all = false
-# Prevents a Meilisearch instance from performing any rate limiting.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-all
-
-rate_limiting_disable_global = false
-# Prevents a Meilisearch instance from performing rate limiting global to all queries.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-global
-
-rate_limiting_global_pool = 100000
-# The maximum pool of search requests that can be performed before they are rejected.
-#
-# The pool starts full at the provided value, then each search request diminishes the pool by 1.
-# When the pool is empty the search request is rejected.
-# The pool is replenished by 1 depending on the cooldown period.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-global-pool
-
-rate_limiting_global_cooldown_ns = 50000
-# The amount of time, in nanoseconds, before the pool of available search requests is replenished by 1 again.
-#
-# The maximum number of available search requests is given by `rate_limiting_global_pool`.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-global-cooldown-ns
-
-rate_limiting_disable_ip = false
-# Prevents a Meilisearch instance from performing rate limiting per IP address.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-ip
-
-rate_limiting_ip_pool = 200
-# The maximum pool of search requests that can be performed from a specific IP before they are rejected.
-#
-# The pool starts full at the provided value, then each search request from the same IP address diminishes the pool by 1.
-# When the pool is empty the search request is rejected.
-# The pool is replenished by 1 depending on the cooldown period.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-ip-pool
-
-rate_limiting_ip_cooldown_ns = 50000000
-# The amount of time, in nanoseconds, before the pool of available search requests for a specific IP address is replenished by 1 again.
-#
-# The maximum number of available search requests for a specific IP address is given by `rate_limiting_ip_pool`.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-ip-cooldown-ns
-
-rate_limiting_disable_api_key = false
-# Prevents a Meilisearch instance from performing rate limiting per API key.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-api-key
-
-rate_limiting_api_key_pool = 10000
-# The maximum pool of search requests that can be performed using a specific API key before they are rejected.
-#
-# The pool starts full at the provided value, then each search request using the same API key diminishes the pool by 1.
-# When the pool is empty the search request is rejected.
-# The pool is replenished by 1 depending on the cooldown period.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-api-key-pool
-
-rate_limiting_api_key_cooldown_ns = 500000
-# The amount of time, in nanoseconds, before the pool of available search requests using a specific API key is replenished by 1 again.
-#
-# The maximum number of available search requests using a specific API key is given by `rate_limiting_api_key_pool`.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-api-key-cooldown-ns
-

 #################
 ### SNAPSHOTS ###
 #################
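
The comments in the removed RATE LIMITING block above describe the mechanism only in prose: each scope (global, per IP, per API key) has a pool that starts full, every search request takes one slot, requests are rejected once the pool is empty, and one slot comes back after each cooldown period. The sketch below restates that bookkeeping in Rust purely as an illustration; the names (RateLimitPool, try_acquire) are invented here and are not taken from the Meilisearch code base.

use std::time::{Duration, Instant};

/// Illustrative pool-and-cooldown limiter matching the removed config comments:
/// the pool starts full, each request consumes one slot, and one slot is
/// returned after every elapsed cooldown period.
struct RateLimitPool {
    capacity: u64,      // e.g. rate_limiting_global_pool = 100000
    available: u64,
    cooldown: Duration, // e.g. rate_limiting_global_cooldown_ns = 50000
    last_refill: Instant,
}

impl RateLimitPool {
    fn new(capacity: u64, cooldown_ns: u64) -> Self {
        Self {
            capacity,
            available: capacity,
            cooldown: Duration::from_nanos(cooldown_ns),
            last_refill: Instant::now(),
        }
    }

    /// Returns true if the search request may proceed, false if it is rejected.
    fn try_acquire(&mut self) -> bool {
        // Replenish one slot per elapsed cooldown period, capped at the capacity.
        // (Simplified: the refill clock is reset on every refill.)
        if !self.cooldown.is_zero() {
            let refills = (self.last_refill.elapsed().as_nanos() / self.cooldown.as_nanos()) as u64;
            if refills > 0 {
                self.available = (self.available + refills).min(self.capacity);
                self.last_refill = Instant::now();
            }
        }
        if self.available == 0 {
            return false; // pool empty: reject the request
        }
        self.available -= 1;
        true
    }
}

fn main() {
    // Tiny pool and a 1 s cooldown so the rejection is easy to observe.
    let mut pool = RateLimitPool::new(3, 1_000_000_000);
    for i in 0..5 {
        println!("request {i}: allowed = {}", pool.try_acquire());
    }
}

Read against the removed defaults, this is roughly one replenished request every 50 µs for the global pool once the initial burst budget of 100000 requests is spent.
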
@@ -1,8 +1,5 @@
 #!/bin/sh

-# This script can optionally use a GitHub token to increase your request limit (for example, if using this script in a CI).
-# To use a GitHub token, pass it through the GITHUB_PAT environment variable.
-
 # GLOBALS

 # Colors
@@ -13,6 +10,9 @@ DEFAULT='\033[0m'
 # Project name
 PNAME='meilisearch'

+# Version regexp i.e. v[number].[number].[number]
+GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$'
+
 # GitHub API address
 GITHUB_API='https://api.github.com/repos/meilisearch/meilisearch/releases'
 # GitHub Release address
@@ -20,26 +20,126 @@ GITHUB_REL='https://github.com/meilisearch/meilisearch/releases/download/'

 # FUNCTIONS

-# Gets the version of the latest stable version of Meilisearch by setting the $latest variable.
-# Returns 0 in case of success, 1 otherwise.
+# semverParseInto and semverLT from: https://github.com/cloudflare/semver_bash/blob/master/semver.sh
+# usage: semverParseInto version major minor patch special
+# version: the string version
+# major, minor, patch, special: will be assigned by the function
+semverParseInto() {
+local RE='[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)'
+# MAJOR
+eval $2=`echo $1 | sed -e "s#$RE#\1#"`
+# MINOR
+eval $3=`echo $1 | sed -e "s#$RE#\2#"`
+# PATCH
+eval $4=`echo $1 | sed -e "s#$RE#\3#"`
+# SPECIAL
+eval $5=`echo $1 | sed -e "s#$RE#\4#"`
+}
+
+# usage: semverLT version1 version2
+semverLT() {
+local MAJOR_A=0
+local MINOR_A=0
+local PATCH_A=0
+local SPECIAL_A=0
+
+local MAJOR_B=0
+local MINOR_B=0
+local PATCH_B=0
+local SPECIAL_B=0
+
+semverParseInto $1 MAJOR_A MINOR_A PATCH_A SPECIAL_A
+semverParseInto $2 MAJOR_B MINOR_B PATCH_B SPECIAL_B
+
+if [ $MAJOR_A -lt $MAJOR_B ]; then
+return 0
+fi
+if [ $MAJOR_A -le $MAJOR_B ] && [ $MINOR_A -lt $MINOR_B ]; then
+return 0
+fi
+if [ $MAJOR_A -le $MAJOR_B ] && [ $MINOR_A -le $MINOR_B ] && [ $PATCH_A -lt $PATCH_B ]; then
+return 0
+fi
+if [ "_$SPECIAL_A" == '_' ] && [ "_$SPECIAL_B" == '_' ] ; then
+return 1
+fi
+if [ "_$SPECIAL_A" == '_' ] && [ "_$SPECIAL_B" != '_' ] ; then
+return 1
+fi
+if [ "_$SPECIAL_A" != '_' ] && [ "_$SPECIAL_B" == '_' ] ; then
+return 0
+fi
+if [ "_$SPECIAL_A" < "_$SPECIAL_B" ]; then
+return 0
+fi
+
+return 1
+}
+
+# Get a token from: https://github.com/settings/tokens to increase rate limit (from 60 to 5000),
+# make sure the token scope is set to 'public_repo'.
+# Create GITHUB_PAT environment variable once you acquired the token to start using it.
+# Returns the tag of the latest stable release (in terms of semver and not of release date).
 get_latest() {
 # temp_file is needed because the grep would start before the download is over
 temp_file=$(mktemp -q /tmp/$PNAME.XXXXXXXXX)
-latest_release="$GITHUB_API/latest"

 if [ $? -ne 0 ]; then
-echo "$0: Can't create temp file."
+echo "$0: Can't create temp file, bye bye.."
-fetch_release_failure_usage
 exit 1
 fi

 if [ -z "$GITHUB_PAT" ]; then
-curl -s "$latest_release" > "$temp_file" || return 1
+curl -s $GITHUB_API > "$temp_file" || return 1
 else
-curl -H "Authorization: token $GITHUB_PAT" -s "$latest_release" > "$temp_file" || return 1
+curl -H "Authorization: token $GITHUB_PAT" -s $GITHUB_API > "$temp_file" || return 1
 fi

-latest="$(cat "$temp_file" | grep '"tag_name":' | cut -d ':' -f2 | tr -d '"' | tr -d ',' | tr -d ' ')"
+releases=$(cat "$temp_file" | \
+grep -E '"tag_name":|"draft":|"prerelease":' \
+| tr -d ',"' | cut -d ':' -f2 | tr -d ' ')
+# Returns a list of [tag_name draft_boolean prerelease_boolean ...]
+# Ex: v0.10.1 false false v0.9.1-rc.1 false true v0.9.0 false false...
+
+i=0
+latest=''
+current_tag=''
+for release_info in $releases; do
+# Checking tag_name
+if [ $i -eq 0 ]; then
+# If it's not an alpha or beta release
+if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then
+current_tag=$release_info
+else
+current_tag=''
+fi
+i=1
+# Checking draft boolean
+elif [ $i -eq 1 ]; then
+if [ "$release_info" = 'true' ]; then
+current_tag=''
+fi
+i=2
+# Checking prerelease boolean
+elif [ $i -eq 2 ]; then
+if [ "$release_info" = 'true' ]; then
+current_tag=''
+fi
+i=0
+# If the current_tag is valid
+if [ "$current_tag" != '' ]; then
+# If there is no latest yes
+if [ "$latest" = '' ]; then
+latest="$current_tag"
+else
+# Comparing latest and the current tag
+semverLT $current_tag $latest
+if [ $? -eq 1 ]; then
+latest="$current_tag"
+fi
+fi
+fi
+fi
+done

 rm -f "$temp_file"
 return 0
@@ -74,9 +174,9 @@ get_archi() {
 archi='amd64'
 ;;
 'arm64')
-# macOS M1/M2
+# MacOS M1
 if [ $os = 'macos' ]; then
-archi='apple-silicon'
+archi='amd64'
 else
 archi='aarch64'
 fi
@@ -110,13 +210,12 @@ fetch_release_failure_usage() {
 echo ''
 printf "$RED%s\n$DEFAULT" 'ERROR: Impossible to get the latest stable version of Meilisearch.'
 echo 'Please let us know about this issue: https://github.com/meilisearch/meilisearch/issues/new/choose'
-echo ''
-echo 'In the meantime, you can manually download the appropriate binary from the GitHub release assets here: https://github.com/meilisearch/meilisearch/releases/latest'
 }

 fill_release_variables() {
 # Fill $latest variable.
 if ! get_latest; then
+# TO CHANGE.
 fetch_release_failure_usage
 exit 1
 fi
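
On the added side of this hunk, get_latest no longer asks the GitHub API for the /latest release: it downloads the whole release list, walks the tag_name/draft/prerelease triples, discards drafts, prereleases, and anything that does not match GREP_SEMVER_REGEXP, and keeps the semver-greatest remaining tag. Below is a compact restatement of that selection rule, written in Rust only to make the intent explicit; the Release struct and the sample tags are invented for this sketch and are not the GitHub API schema the script consumes.

/// Illustrative release record; field names are assumptions for this sketch.
struct Release {
    tag_name: &'static str,
    draft: bool,
    prerelease: bool,
}

/// Parse "vMAJOR.MINOR.PATCH" into a comparable triple, rejecting anything
/// else (alpha/beta/rc tags), mirroring what GREP_SEMVER_REGEXP accepts.
fn parse_semver(tag: &str) -> Option<(u64, u64, u64)> {
    let rest = tag.strip_prefix('v')?;
    let mut parts = rest.split('.');
    let major = parts.next()?.parse().ok()?;
    let minor = parts.next()?.parse().ok()?;
    let patch = parts.next()?.parse().ok()?;
    if parts.next().is_some() {
        return None;
    }
    Some((major, minor, patch))
}

/// Keep the semver-greatest stable tag, ignoring drafts and prereleases,
/// like the shell loop over $releases does.
fn latest_stable(releases: &[Release]) -> Option<&'static str> {
    releases
        .iter()
        .filter(|r| !r.draft && !r.prerelease)
        .filter_map(|r| parse_semver(r.tag_name).map(|v| (v, r.tag_name)))
        .max_by_key(|(v, _)| *v)
        .map(|(_, tag)| tag)
}

fn main() {
    let releases = [
        Release { tag_name: "v0.10.1", draft: false, prerelease: false },
        Release { tag_name: "v0.9.1-rc.1", draft: false, prerelease: true },
        Release { tag_name: "v0.30.0", draft: false, prerelease: false },
    ];
    // Prints Some("v0.30.0"): the newest tag by semver, not by release date.
    println!("{:?}", latest_stable(&releases));
}

The extra work buys exactly what the script comment promises: a release candidate or an older tag published after a newer one never becomes `latest`.
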
@@ -1,6 +1,6 @@
 [package]
 name = "dump"
-version = "1.0.0"
+version = "0.30.0"
 edition = "2021"

 [dependencies]
@@ -3,6 +3,8 @@ use thiserror::Error;

 #[derive(Debug, Error)]
 pub enum Error {
+#[error("The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.")]
+DumpV1Unsupported,
 #[error("Bad index name.")]
 BadIndexName,
 #[error("Malformed task.")]
@@ -19,14 +21,14 @@ pub enum Error {
 impl ErrorCode for Error {
 fn error_code(&self) -> Code {
 match self {
-Error::Io(e) => e.error_code(),
-// These errors either happen when creating a dump and don't need any error code,
-// or come from an internal bad deserialization.
+// Are these three really Internal errors?
+// TODO look at that later.
+Error::Io(_) => Code::Internal,
 Error::Serde(_) => Code::Internal,
 Error::Uuid(_) => Code::Internal,

 // all these errors should never be raised when creating a dump, thus no error code should be associated.
+Error::DumpV1Unsupported => Code::Internal,
 Error::BadIndexName => Code::Internal,
 Error::MalformedTask => Code::Internal,
 }
@@ -23,7 +23,7 @@ const CURRENT_DUMP_VERSION: Version = Version::V6;

 type Result<T> = std::result::Result<T, Error>;

-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct Metadata {
 pub dump_version: Version,
@@ -32,7 +32,7 @@ pub struct Metadata {
 pub dump_date: OffsetDateTime,
 }

-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexMetadata {
 pub uid: String,
@@ -43,7 +43,7 @@ pub struct IndexMetadata {
 pub updated_at: OffsetDateTime,
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
+#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
 pub enum Version {
 V1,
 V2,
@@ -416,6 +416,7 @@ pub(crate) mod test {
 }

 #[test]
+#[ignore]
 fn test_creating_and_read_dump() {
 let mut file = create_test_dump();
 let mut dump = DumpReader::open(&mut file).unwrap();
@@ -1,4 +1,3 @@
-pub mod v1_to_v2;
 pub mod v2_to_v3;
 pub mod v3_to_v4;
 pub mod v4_to_v5;
|
|||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v1_to_v2.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v1_to_v2.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v1_to_v2.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness",
|
|
||||||
"asc(release_date)"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v1_to_v2.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness",
|
|
||||||
"asc(release_date)"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v1_to_v2.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v2_to_v3.rs
|
|
||||||
expression: movies2.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v2_to_v3.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v2_to_v3.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v2_to_v3.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v3_to_v4.rs
|
|
||||||
expression: movies2.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v3_to_v4.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v3_to_v4.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v3_to_v4.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"sortableAttributes": [
|
|
||||||
"release_date"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v4_to_v5.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": "Reset",
|
|
||||||
"searchableAttributes": "Reset",
|
|
||||||
"filterableAttributes": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"sortableAttributes": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"rankingRules": {
|
|
||||||
"Set": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"stopWords": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"synonyms": {
|
|
||||||
"Set": {}
|
|
||||||
},
|
|
||||||
"distinctAttribute": "Reset",
|
|
||||||
"typoTolerance": {
|
|
||||||
"Set": {
|
|
||||||
"enabled": {
|
|
||||||
"Set": true
|
|
||||||
},
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"Set": {
|
|
||||||
"oneTypo": {
|
|
||||||
"Set": 5
|
|
||||||
},
|
|
||||||
"twoTypos": {
|
|
||||||
"Set": 9
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"disableOnWords": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"disableOnAttributes": {
|
|
||||||
"Set": []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"faceting": "NotSet",
|
|
||||||
"pagination": "NotSet"
|
|
||||||
}
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v4_to_v5.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": "Reset",
|
|
||||||
"searchableAttributes": "Reset",
|
|
||||||
"filterableAttributes": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"sortableAttributes": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"rankingRules": {
|
|
||||||
"Set": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"stopWords": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"synonyms": {
|
|
||||||
"Set": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"distinctAttribute": "Reset",
|
|
||||||
"typoTolerance": {
|
|
||||||
"Set": {
|
|
||||||
"enabled": {
|
|
||||||
"Set": true
|
|
||||||
},
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"Set": {
|
|
||||||
"oneTypo": {
|
|
||||||
"Set": 5
|
|
||||||
},
|
|
||||||
"twoTypos": {
|
|
||||||
"Set": 9
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"disableOnWords": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"disableOnAttributes": {
|
|
||||||
"Set": []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"faceting": "NotSet",
|
|
||||||
"pagination": "NotSet"
|
|
||||||
}
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v4_to_v5.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": "Reset",
|
|
||||||
"searchableAttributes": "Reset",
|
|
||||||
"filterableAttributes": {
|
|
||||||
"Set": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"sortableAttributes": {
|
|
||||||
"Set": [
|
|
||||||
"release_date"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"rankingRules": {
|
|
||||||
"Set": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"stopWords": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"synonyms": {
|
|
||||||
"Set": {}
|
|
||||||
},
|
|
||||||
"distinctAttribute": "Reset",
|
|
||||||
"typoTolerance": {
|
|
||||||
"Set": {
|
|
||||||
"enabled": {
|
|
||||||
"Set": true
|
|
||||||
},
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"Set": {
|
|
||||||
"oneTypo": {
|
|
||||||
"Set": 5
|
|
||||||
},
|
|
||||||
"twoTypos": {
|
|
||||||
"Set": 9
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"disableOnWords": {
|
|
||||||
"Set": []
|
|
||||||
},
|
|
||||||
"disableOnAttributes": {
|
|
||||||
"Set": []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"faceting": "NotSet",
|
|
||||||
"pagination": "NotSet"
|
|
||||||
}
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v5_to_v6.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null,
|
|
||||||
"typoTolerance": {
|
|
||||||
"enabled": true,
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"oneTypo": 5,
|
|
||||||
"twoTypos": 9
|
|
||||||
},
|
|
||||||
"disableOnWords": [],
|
|
||||||
"disableOnAttributes": []
|
|
||||||
},
|
|
||||||
"faceting": {
|
|
||||||
"maxValuesPerFacet": 100
|
|
||||||
},
|
|
||||||
"pagination": {
|
|
||||||
"maxTotalHits": 1000
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v5_to_v6.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null,
|
|
||||||
"typoTolerance": {
|
|
||||||
"enabled": true,
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"oneTypo": 5,
|
|
||||||
"twoTypos": 9
|
|
||||||
},
|
|
||||||
"disableOnWords": [],
|
|
||||||
"disableOnAttributes": []
|
|
||||||
},
|
|
||||||
"faceting": {
|
|
||||||
"maxValuesPerFacet": 100
|
|
||||||
},
|
|
||||||
"pagination": {
|
|
||||||
"maxTotalHits": 1000
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/compat/v5_to_v6.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"sortableAttributes": [
|
|
||||||
"release_date"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null,
|
|
||||||
"typoTolerance": {
|
|
||||||
"enabled": true,
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"oneTypo": 5,
|
|
||||||
"twoTypos": 9
|
|
||||||
},
|
|
||||||
"disableOnWords": [],
|
|
||||||
"disableOnAttributes": []
|
|
||||||
},
|
|
||||||
"faceting": {
|
|
||||||
"maxValuesPerFacet": 100
|
|
||||||
},
|
|
||||||
"pagination": {
|
|
||||||
"maxTotalHits": 1000
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,414 +0,0 @@
|
|||||||
use std::collections::BTreeSet;
|
|
||||||
use std::str::FromStr;
|
|
||||||
|
|
||||||
use super::v2_to_v3::CompatV2ToV3;
|
|
||||||
use crate::reader::{v1, v2, Document};
|
|
||||||
use crate::Result;
|
|
||||||
|
|
||||||
pub struct CompatV1ToV2 {
|
|
||||||
pub from: v1::V1Reader,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CompatV1ToV2 {
|
|
||||||
pub fn new(v1: v1::V1Reader) -> Self {
|
|
||||||
Self { from: v1 }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn to_v3(self) -> CompatV2ToV3 {
|
|
||||||
CompatV2ToV3::Compat(self)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn version(&self) -> crate::Version {
|
|
||||||
self.from.version()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn date(&self) -> Option<time::OffsetDateTime> {
|
|
||||||
self.from.date()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn index_uuid(&self) -> Vec<v2::meta::IndexUuid> {
|
|
||||||
self.from
|
|
||||||
.index_uuid()
|
|
||||||
.into_iter()
|
|
||||||
.enumerate()
|
|
||||||
// we use the index of the index 😬 as UUID for the index, so that we can link the v2::Task to their index
|
|
||||||
.map(|(index, index_uuid)| v2::meta::IndexUuid {
|
|
||||||
uid: index_uuid.uid,
|
|
||||||
uuid: uuid::Uuid::from_u128(index as u128),
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn indexes(&self) -> Result<impl Iterator<Item = Result<CompatIndexV1ToV2>> + '_> {
|
|
||||||
Ok(self.from.indexes()?.map(|index_reader| Ok(CompatIndexV1ToV2 { from: index_reader? })))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn tasks(
|
|
||||||
&mut self,
|
|
||||||
) -> Box<dyn Iterator<Item = Result<(v2::Task, Option<v2::UpdateFile>)>> + '_> {
|
|
||||||
// Convert an error here to an iterator yielding the error
|
|
||||||
let indexes = match self.from.indexes() {
|
|
||||||
Ok(indexes) => indexes,
|
|
||||||
Err(err) => return Box::new(std::iter::once(Err(err))),
|
|
||||||
};
|
|
||||||
let it = indexes.enumerate().flat_map(
|
|
||||||
move |(index, index_reader)| -> Box<dyn Iterator<Item = _>> {
|
|
||||||
let index_reader = match index_reader {
|
|
||||||
Ok(index_reader) => index_reader,
|
|
||||||
Err(err) => return Box::new(std::iter::once(Err(err))),
|
|
||||||
};
|
|
||||||
Box::new(
|
|
||||||
index_reader
|
|
||||||
.tasks()
|
|
||||||
// Filter out the UpdateStatus::Customs variant that is not supported in v2
|
|
||||||
// and enqueued tasks, that don't contain the necessary update file in v1
|
|
||||||
.filter_map(move |task| -> Option<_> {
|
|
||||||
let task = match task {
|
|
||||||
Ok(task) => task,
|
|
||||||
Err(err) => return Some(Err(err)),
|
|
||||||
};
|
|
||||||
Some(Ok((
|
|
||||||
v2::Task {
|
|
||||||
uuid: uuid::Uuid::from_u128(index as u128),
|
|
||||||
update: Option::from(task)?,
|
|
||||||
},
|
|
||||||
None,
|
|
||||||
)))
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
},
|
|
||||||
);
|
|
||||||
Box::new(it)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct CompatIndexV1ToV2 {
|
|
||||||
pub from: v1::V1IndexReader,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CompatIndexV1ToV2 {
|
|
||||||
pub fn metadata(&self) -> &crate::IndexMetadata {
|
|
||||||
self.from.metadata()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn documents(&mut self) -> Result<Box<dyn Iterator<Item = Result<Document>> + '_>> {
|
|
||||||
self.from.documents().map(|it| Box::new(it) as Box<dyn Iterator<Item = _>>)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn settings(&mut self) -> Result<v2::settings::Settings<v2::settings::Checked>> {
|
|
||||||
Ok(v2::settings::Settings::<v2::settings::Unchecked>::from(self.from.settings()?).check())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
|
|
||||||
fn from(source: v1::settings::Settings) -> Self {
|
|
||||||
let displayed_attributes = source
|
|
||||||
.displayed_attributes
|
|
||||||
.map(|opt| opt.map(|displayed_attributes| displayed_attributes.into_iter().collect()));
|
|
||||||
let attributes_for_faceting = source.attributes_for_faceting.map(|opt| {
|
|
||||||
opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
|
|
||||||
});
|
|
||||||
let ranking_rules = source.ranking_rules.map(|opt| {
|
|
||||||
opt.map(|ranking_rules| {
|
|
||||||
ranking_rules
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|ranking_rule| {
|
|
||||||
match v1::settings::RankingRule::from_str(&ranking_rule) {
|
|
||||||
Ok(ranking_rule) => {
|
|
||||||
let criterion: Option<v2::settings::Criterion> =
|
|
||||||
ranking_rule.into();
|
|
||||||
criterion.as_ref().map(ToString::to_string)
|
|
||||||
}
|
|
||||||
Err(()) => Some(ranking_rule),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
Self {
|
|
||||||
displayed_attributes,
|
|
||||||
searchable_attributes: source.searchable_attributes,
|
|
||||||
filterable_attributes: attributes_for_faceting,
|
|
||||||
ranking_rules,
|
|
||||||
stop_words: source.stop_words,
|
|
||||||
synonyms: source.synonyms,
|
|
||||||
distinct_attribute: source.distinct_attribute,
|
|
||||||
_kind: std::marker::PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<v1::update::UpdateStatus> for Option<v2::updates::UpdateStatus> {
|
|
||||||
fn from(source: v1::update::UpdateStatus) -> Self {
|
|
||||||
use v1::update::UpdateStatus as UpdateStatusV1;
|
|
||||||
use v2::updates::UpdateStatus as UpdateStatusV2;
|
|
||||||
Some(match source {
|
|
||||||
UpdateStatusV1::Enqueued { content } => {
|
|
||||||
log::warn!(
|
|
||||||
"Cannot import task {} (importing enqueued tasks from v1 dumps is unsupported)",
|
|
||||||
content.update_id
|
|
||||||
);
|
|
||||||
log::warn!("Task will be skipped in the queue of imported tasks.");
|
|
||||||
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
UpdateStatusV1::Failed { content } => UpdateStatusV2::Failed(v2::updates::Failed {
|
|
||||||
from: v2::updates::Processing {
|
|
||||||
from: v2::updates::Enqueued {
|
|
||||||
update_id: content.update_id,
|
|
||||||
meta: Option::from(content.update_type)?,
|
|
||||||
enqueued_at: content.enqueued_at,
|
|
||||||
content: None,
|
|
||||||
},
|
|
||||||
started_processing_at: content.processed_at
|
|
||||||
- std::time::Duration::from_secs_f64(content.duration),
|
|
||||||
},
|
|
||||||
error: v2::ResponseError {
|
|
||||||
// error code is ignored by serialization, and so always default in deserialized v2 dumps
|
|
||||||
// that's a good thing, because we don't have them in v1 dump 😅
|
|
||||||
code: http::StatusCode::default(),
|
|
||||||
message: content.error.unwrap_or_default(),
|
|
||||||
// error codes are unchanged between v1 and v2
|
|
||||||
error_code: content.error_code.unwrap_or_default(),
|
|
||||||
// error types are unchanged between v1 and v2
|
|
||||||
error_type: content.error_type.unwrap_or_default(),
|
|
||||||
// error links are unchanged between v1 and v2
|
|
||||||
error_link: content.error_link.unwrap_or_default(),
|
|
||||||
},
|
|
||||||
failed_at: content.processed_at,
|
|
||||||
}),
|
|
||||||
UpdateStatusV1::Processed { content } => {
|
|
||||||
UpdateStatusV2::Processed(v2::updates::Processed {
|
|
||||||
success: match &content.update_type {
|
|
||||||
v1::update::UpdateType::ClearAll => {
|
|
||||||
v2::updates::UpdateResult::DocumentDeletion { deleted: u64::MAX }
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::Customs => v2::updates::UpdateResult::Other,
|
|
||||||
v1::update::UpdateType::DocumentsAddition { number } => {
|
|
||||||
v2::updates::UpdateResult::DocumentsAddition(
|
|
||||||
v2::updates::DocumentAdditionResult { nb_documents: *number },
|
|
||||||
)
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::DocumentsPartial { number } => {
|
|
||||||
v2::updates::UpdateResult::DocumentsAddition(
|
|
||||||
v2::updates::DocumentAdditionResult { nb_documents: *number },
|
|
||||||
)
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::DocumentsDeletion { number } => {
|
|
||||||
v2::updates::UpdateResult::DocumentDeletion { deleted: *number as u64 }
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::Settings { .. } => v2::updates::UpdateResult::Other,
|
|
||||||
},
|
|
||||||
processed_at: content.processed_at,
|
|
||||||
from: v2::updates::Processing {
|
|
||||||
from: v2::updates::Enqueued {
|
|
||||||
update_id: content.update_id,
|
|
||||||
meta: Option::from(content.update_type)?,
|
|
||||||
enqueued_at: content.enqueued_at,
|
|
||||||
content: None,
|
|
||||||
},
|
|
||||||
started_processing_at: content.processed_at
|
|
||||||
- std::time::Duration::from_secs_f64(content.duration),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<v1::update::UpdateType> for Option<v2::updates::UpdateMeta> {
|
|
||||||
fn from(source: v1::update::UpdateType) -> Self {
|
|
||||||
Some(match source {
|
|
||||||
v1::update::UpdateType::ClearAll => v2::updates::UpdateMeta::ClearDocuments,
|
|
||||||
v1::update::UpdateType::Customs => {
|
|
||||||
log::warn!("Ignoring task with type 'Customs' that is no longer supported");
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::DocumentsAddition { .. } => {
|
|
||||||
v2::updates::UpdateMeta::DocumentsAddition {
|
|
||||||
method: v2::updates::IndexDocumentsMethod::ReplaceDocuments,
|
|
||||||
format: v2::updates::UpdateFormat::Json,
|
|
||||||
primary_key: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::DocumentsPartial { .. } => {
|
|
||||||
v2::updates::UpdateMeta::DocumentsAddition {
|
|
||||||
method: v2::updates::IndexDocumentsMethod::UpdateDocuments,
|
|
||||||
format: v2::updates::UpdateFormat::Json,
|
|
||||||
primary_key: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::DocumentsDeletion { .. } => {
|
|
||||||
v2::updates::UpdateMeta::DeleteDocuments { ids: vec![] }
|
|
||||||
}
|
|
||||||
v1::update::UpdateType::Settings { settings } => {
|
|
||||||
v2::updates::UpdateMeta::Settings((*settings).into())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<v1::settings::SettingsUpdate> for v2::Settings<v2::Unchecked> {
|
|
||||||
fn from(source: v1::settings::SettingsUpdate) -> Self {
|
|
||||||
let displayed_attributes: Option<Option<BTreeSet<String>>> =
|
|
||||||
source.displayed_attributes.into();
|
|
||||||
|
|
||||||
let attributes_for_faceting: Option<Option<Vec<String>>> =
|
|
||||||
source.attributes_for_faceting.into();
|
|
||||||
|
|
||||||
let ranking_rules: Option<Option<Vec<v1::settings::RankingRule>>> =
|
|
||||||
source.ranking_rules.into();
|
|
||||||
|
|
||||||
// go from the concrete types of v1 (RankingRule) to the concrete type of v2 (Criterion),
|
|
||||||
// and then back to string as this is what the settings manipulate
|
|
||||||
let ranking_rules = ranking_rules.map(|opt| {
|
|
||||||
opt.map(|ranking_rules| {
|
|
||||||
ranking_rules
|
|
||||||
.into_iter()
|
|
||||||
// filter out the WordsPosition ranking rule that exists in v1 but not v2
|
|
||||||
.filter_map(|ranking_rule| {
|
|
||||||
Option::<v2::settings::Criterion>::from(ranking_rule)
|
|
||||||
})
|
|
||||||
.map(|criterion| criterion.to_string())
|
|
||||||
.collect()
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
Self {
|
|
||||||
displayed_attributes: displayed_attributes.map(|opt| {
|
|
||||||
opt.map(|displayed_attributes| displayed_attributes.into_iter().collect())
|
|
||||||
}),
|
|
||||||
searchable_attributes: source.searchable_attributes.into(),
|
|
||||||
filterable_attributes: attributes_for_faceting.map(|opt| {
|
|
||||||
opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
|
|
||||||
}),
|
|
||||||
ranking_rules,
|
|
||||||
stop_words: source.stop_words.into(),
|
|
||||||
synonyms: source.synonyms.into(),
|
|
||||||
distinct_attribute: source.distinct_attribute.into(),
|
|
||||||
_kind: std::marker::PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<v1::settings::RankingRule> for Option<v2::settings::Criterion> {
|
|
||||||
fn from(source: v1::settings::RankingRule) -> Self {
|
|
||||||
match source {
|
|
||||||
v1::settings::RankingRule::Typo => Some(v2::settings::Criterion::Typo),
|
|
||||||
v1::settings::RankingRule::Words => Some(v2::settings::Criterion::Words),
|
|
||||||
v1::settings::RankingRule::Proximity => Some(v2::settings::Criterion::Proximity),
|
|
||||||
v1::settings::RankingRule::Attribute => Some(v2::settings::Criterion::Attribute),
|
|
||||||
v1::settings::RankingRule::WordsPosition => {
|
|
||||||
log::warn!("Removing the 'WordsPosition' ranking rule that is no longer supported, please check the resulting ranking rules of your indexes");
|
|
||||||
None
|
|
||||||
}
|
|
||||||
v1::settings::RankingRule::Exactness => Some(v2::settings::Criterion::Exactness),
|
|
||||||
v1::settings::RankingRule::Asc(field_name) => {
|
|
||||||
Some(v2::settings::Criterion::Asc(field_name))
|
|
||||||
}
|
|
||||||
v1::settings::RankingRule::Desc(field_name) => {
|
|
||||||
Some(v2::settings::Criterion::Desc(field_name))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> From<v1::settings::UpdateState<T>> for Option<Option<T>> {
|
|
||||||
fn from(source: v1::settings::UpdateState<T>) -> Self {
|
|
||||||
match source {
|
|
||||||
v1::settings::UpdateState::Update(new_value) => Some(Some(new_value)),
|
|
||||||
v1::settings::UpdateState::Clear => Some(None),
|
|
||||||
v1::settings::UpdateState::Nothing => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
#[cfg(test)]
pub(crate) mod test {
    use std::fs::File;
    use std::io::BufReader;

    use flate2::bufread::GzDecoder;
    use meili_snap::insta;
    use tempfile::TempDir;

    use super::*;

    #[test]
    fn compat_v1_v2() {
        let dump = File::open("tests/assets/v1.dump").unwrap();
        let dir = TempDir::new().unwrap();
        let mut dump = BufReader::new(dump);
        let gz = GzDecoder::new(&mut dump);
        let mut archive = tar::Archive::new(gz);
        archive.unpack(dir.path()).unwrap();

        let mut dump = v1::V1Reader::open(dir).unwrap().to_v2();

        // top level infos
        assert_eq!(dump.date(), None);

        // tasks
        let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"ad6245d98d1a8e30535f3339a9a8d223");
        assert_eq!(update_files.len(), 9);
        assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dumps v1

        // indexes
        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
        // the index are not ordered in any way by default
        indexes.sort_by_key(|index| index.metadata().uid.to_string());

        let mut products = indexes.pop().unwrap();
        let mut movies = indexes.pop().unwrap();
        let mut spells = indexes.pop().unwrap();
        assert!(indexes.is_empty());

        // products
        insta::assert_json_snapshot!(products.metadata(), @r###"
        {
          "uid": "products",
          "primaryKey": "sku",
          "createdAt": "2022-10-02T13:23:39.976870431Z",
          "updatedAt": "2022-10-02T13:27:54.353262482Z"
        }
        "###);

        insta::assert_json_snapshot!(products.settings().unwrap());
        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
        assert_eq!(documents.len(), 10);
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");

        // movies
        insta::assert_json_snapshot!(movies.metadata(), @r###"
        {
          "uid": "movies",
          "primaryKey": "id",
          "createdAt": "2022-10-02T13:15:29.477512777Z",
          "updatedAt": "2022-10-02T13:21:12.671204856Z"
        }
        "###);

        insta::assert_json_snapshot!(movies.settings().unwrap());
        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
        assert_eq!(documents.len(), 10);
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b63dbed5bbc059f3e32bc471ae699bf5");

        // spells
        insta::assert_json_snapshot!(spells.metadata(), @r###"
        {
          "uid": "dnd_spells",
          "primaryKey": "index",
          "createdAt": "2022-10-02T13:38:26.358882984Z",
          "updatedAt": "2022-10-02T13:38:26.385609433Z"
        }
        "###);

        insta::assert_json_snapshot!(spells.settings().unwrap());
        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
        assert_eq!(documents.len(), 10);
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"aa24c0cfc733d66c396237ad44263bed");
    }
}
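// Note (illustration, not part of the diff above): the removed test unpacks
// `tests/assets/v1.dump` before reading it because a dump file is a
// gzip-compressed tar archive. A stripped-down version of just that unpacking
// step, using the same crates the test imports (the path is illustrative):

use std::fs::File;
use std::io::BufReader;

use flate2::bufread::GzDecoder;
use tempfile::TempDir;

fn unpack_dump(path: &str) -> std::io::Result<TempDir> {
    let dir = TempDir::new()?;
    let file = BufReader::new(File::open(path)?);
    // Decompress the gzip stream on the fly, then unpack the tar archive inside it.
    let gz = GzDecoder::new(file);
    let mut archive = tar::Archive::new(gz);
    archive.unpack(dir.path())?;
    Ok(dir)
}

fn main() -> std::io::Result<()> {
    let dir = unpack_dump("tests/assets/v1.dump")?;
    println!("dump unpacked into {:?}", dir.path());
    Ok(())
}
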
@@ -4,28 +4,22 @@ use std::str::FromStr;
 use time::OffsetDateTime;
 use uuid::Uuid;
 
-use super::v1_to_v2::{CompatIndexV1ToV2, CompatV1ToV2};
 use super::v3_to_v4::CompatV3ToV4;
 use crate::reader::{v2, v3, Document};
 use crate::Result;
 
-pub enum CompatV2ToV3 {
-    V2(v2::V2Reader),
-    Compat(CompatV1ToV2),
+pub struct CompatV2ToV3 {
+    pub from: v2::V2Reader,
 }
 
 impl CompatV2ToV3 {
     pub fn new(v2: v2::V2Reader) -> CompatV2ToV3 {
-        CompatV2ToV3::V2(v2)
+        CompatV2ToV3 { from: v2 }
     }
 
     pub fn index_uuid(&self) -> Vec<v3::meta::IndexUuid> {
-        let v2_uuids = match self {
-            CompatV2ToV3::V2(from) => from.index_uuid(),
-            CompatV2ToV3::Compat(compat) => compat.index_uuid(),
-        };
-        v2_uuids
-            .into_iter()
+        self.from
+            .index_uuid()
             .into_iter()
             .map(|index| v3::meta::IndexUuid { uid: index.uid, uuid: index.uuid })
             .collect()
@@ -36,17 +30,11 @@ impl CompatV2ToV3 {
     }
 
     pub fn version(&self) -> crate::Version {
-        match self {
-            CompatV2ToV3::V2(from) => from.version(),
-            CompatV2ToV3::Compat(compat) => compat.version(),
-        }
+        self.from.version()
     }
 
     pub fn date(&self) -> Option<time::OffsetDateTime> {
-        match self {
-            CompatV2ToV3::V2(from) => from.date(),
-            CompatV2ToV3::Compat(compat) => compat.date(),
-        }
+        self.from.date()
    }
 
     pub fn instance_uid(&self) -> Result<Option<uuid::Uuid>> {
@@ -54,18 +42,10 @@ impl CompatV2ToV3 {
     }
 
     pub fn indexes(&self) -> Result<impl Iterator<Item = Result<CompatIndexV2ToV3>> + '_> {
-        Ok(match self {
-            CompatV2ToV3::V2(from) => Box::new(from.indexes()?.map(|index_reader| -> Result<_> {
-                let compat = CompatIndexV2ToV3::new(index_reader?);
-                Ok(compat)
-            }))
-                as Box<dyn Iterator<Item = Result<CompatIndexV2ToV3>> + '_>,
-            CompatV2ToV3::Compat(compat) => Box::new(compat.indexes()?.map(|index_reader| {
-                let compat = CompatIndexV2ToV3::Compat(Box::new(index_reader?));
-                Ok(compat)
-            }))
-                as Box<dyn Iterator<Item = Result<CompatIndexV2ToV3>> + '_>,
-        })
+        Ok(self.from.indexes()?.map(|index_reader| -> Result<_> {
+            let compat = CompatIndexV2ToV3::new(index_reader?);
+            Ok(compat)
+        }))
     }
 
     pub fn tasks(
@@ -74,13 +54,11 @@ impl CompatV2ToV3 {
         dyn Iterator<Item = Result<(v3::Task, Option<Box<dyn Iterator<Item = Result<Document>>>>)>>
             + '_,
     > {
-        let tasks = match self {
-            CompatV2ToV3::V2(from) => from.tasks(),
-            CompatV2ToV3::Compat(compat) => compat.tasks(),
-        };
+        let _indexes = self.from.index_uuid.clone();
 
         Box::new(
-            tasks
+            self.from
+                .tasks()
                 .map(move |task| {
                     task.map(|(task, content_file)| {
                         let task = v3::Task { uuid: task.uuid, update: task.update.into() };
@@ -98,38 +76,27 @@ impl CompatV2ToV3 {
     }
 }
 
-pub enum CompatIndexV2ToV3 {
-    V2(v2::V2IndexReader),
-    Compat(Box<CompatIndexV1ToV2>),
+pub struct CompatIndexV2ToV3 {
+    from: v2::V2IndexReader,
 }
 
 impl CompatIndexV2ToV3 {
     pub fn new(v2: v2::V2IndexReader) -> CompatIndexV2ToV3 {
-        CompatIndexV2ToV3::V2(v2)
+        CompatIndexV2ToV3 { from: v2 }
    }
 
     pub fn metadata(&self) -> &crate::IndexMetadata {
-        match self {
-            CompatIndexV2ToV3::V2(from) => from.metadata(),
-            CompatIndexV2ToV3::Compat(compat) => compat.metadata(),
-        }
+        self.from.metadata()
     }
 
     pub fn documents(&mut self) -> Result<Box<dyn Iterator<Item = Result<Document>> + '_>> {
-        match self {
-            CompatIndexV2ToV3::V2(from) => from
-                .documents()
-                .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<Document>> + '_>),
-            CompatIndexV2ToV3::Compat(compat) => compat.documents(),
-        }
+        self.from
+            .documents()
+            .map(|iter| Box::new(iter) as Box<dyn Iterator<Item = Result<Document>> + '_>)
     }
 
     pub fn settings(&mut self) -> Result<v3::Settings<v3::Checked>> {
-        let settings = match self {
-            CompatIndexV2ToV3::V2(from) => from.settings()?,
-            CompatIndexV2ToV3::Compat(compat) => compat.settings()?,
-        };
-        Ok(v3::Settings::<v3::Unchecked>::from(settings).check())
+        Ok(v3::Settings::<v3::Unchecked>::from(self.from.settings()?).check())
     }
 }
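// Note (illustration, not part of the diff above): the change replaces a
// two-variant enum wrapper (`V2(..)` / `Compat(..)`) with a struct holding a
// single `from` field, so every method shrinks from a `match self` to a plain
// delegation. A toy before/after of that refactor shape (not the dump reader
// types themselves):

// Before: every accessor has to match over the possible sources.
enum ReaderBefore {
    Native(String),
    Compat(String),
}

impl ReaderBefore {
    fn version(&self) -> &str {
        match self {
            ReaderBefore::Native(v) => v,
            ReaderBefore::Compat(v) => v,
        }
    }
}

// After: a single wrapped source, accessors simply forward to it.
struct ReaderAfter {
    from: String,
}

impl ReaderAfter {
    fn version(&self) -> &str {
        &self.from
    }
}

fn main() {
    assert_eq!(ReaderBefore::Native("v2".into()).version(), "v2");
    assert_eq!(ReaderBefore::Compat("v1".into()).version(), "v1");
    assert_eq!(ReaderAfter { from: "v2".into() }.version(), "v2");
}
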
@@ -414,6 +381,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn compat_v2_v3() {
         let dump = File::open("tests/assets/v2.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -459,7 +427,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"54b3d7a0d96de35427d867fa17164a99");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@@ -474,7 +442,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"ae7c5ade2243a553152dab2f354e9095");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@@ -489,7 +457,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies2.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"1be82b894556d23953af557b6a328a58");
         let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@@ -504,7 +472,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"1be82b894556d23953af557b6a328a58");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
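// Note (illustration, not part of the diff above): the hunks swap inline JSON
// snapshots of the settings for `meili_snap::snapshot_hash!` checks against
// 32-character hex digests, so the expectation lives in the test instead of a
// separate .snap file. The macro's implementation is not shown here; the sketch
// below only illustrates the general idea of pinning a digest of a Debug
// rendering, and it assumes an MD5 digest via the `md5` crate.

fn debug_hash<T: std::fmt::Debug>(value: &T) -> String {
    // Hash the pretty Debug output, mirroring `format!("{:#?}", documents)` above.
    let rendered = format!("{value:#?}");
    format!("{:x}", md5::compute(rendered.as_bytes()))
}

fn main() {
    let documents = vec!["doc-1", "doc-2"];
    let hash = debug_hash(&documents);
    // In a test the expected digest would be pinned as a literal, which is what
    // `snapshot_hash!(format!("{:#?}", documents), @"<hex>")` does.
    assert_eq!(hash.len(), 32);
    println!("{hash}");
}
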
@@ -347,6 +347,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn compat_v3_v4() {
         let dump = File::open("tests/assets/v3.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -396,7 +397,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"d3402aff19b90acea9e9a07c466690aa");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@@ -411,7 +412,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"687aaab250f01b55d57bc69aa313b581");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@@ -426,7 +427,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies2.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"cd9fedbd7e3492831a94da62c90013ea");
         let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@@ -441,7 +442,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"cd9fedbd7e3492831a94da62c90013ea");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
@@ -383,6 +383,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn compat_v4_v5() {
         let dump = File::open("tests/assets/v4.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -429,7 +430,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"26947283836ee4cdf0974f82efcc5332");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@@ -444,7 +445,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"156871410d17e23803d0c90ddc6a66cb");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
@@ -459,7 +460,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"69c9916142612cf4a2da9b9ed9455e9e");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
@@ -402,6 +402,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn compat_v5_v6() {
         let dump = File::open("tests/assets/v5.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -419,7 +420,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"42d4200cf6d92a6449989ca48cd8e28a");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -449,7 +450,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"8e5cadabf74aebe1160bf51c3d489efe");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@@ -464,7 +465,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"4894ac1e74b9e1069ed5ee262b7a1aca");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 200);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
@@ -479,7 +480,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"054dbf08a79e08bb9becba6f5d090f13");
        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
dump/src/reader/error.rs (new file, 42 lines)
@@ -0,0 +1,42 @@
+use meilisearch_auth::error::AuthControllerError;
+use meilisearch_types::error::{Code, ErrorCode};
+use meilisearch_types::internal_error;
+
+use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError};
+
+pub type Result<T> = std::result::Result<T, DumpError>;
+
+#[derive(thiserror::Error, Debug)]
+pub enum DumpError {
+    #[error("An internal error has occurred. `{0}`.")]
+    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
+    #[error("{0}")]
+    IndexResolver(Box<IndexResolverError>),
+}
+
+internal_error!(
+    DumpError: milli::heed::Error,
+    std::io::Error,
+    tokio::task::JoinError,
+    tokio::sync::oneshot::error::RecvError,
+    serde_json::error::Error,
+    tempfile::PersistError,
+    fs_extra::error::Error,
+    AuthControllerError,
+    TaskError
+);
+
+impl From<IndexResolverError> for DumpError {
+    fn from(e: IndexResolverError) -> Self {
+        Self::IndexResolver(Box::new(e))
+    }
+}
+
+impl ErrorCode for DumpError {
+    fn error_code(&self) -> Code {
+        match self {
+            DumpError::Internal(_) => Code::Internal,
+            DumpError::IndexResolver(e) => e.error_code(),
+        }
+    }
+}
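// Note (illustration, not part of the new file above): `DumpError` relies on
// `thiserror` for its Display impls and on the project's `internal_error!`
// macro to funnel many unrelated error types into the `Internal` variant. A
// minimal sketch of the same delegating-error pattern written with plain
// `thiserror` attributes and a hand-written From impl (the expansion of
// `internal_error!` is an assumption based on how it is used above):

use thiserror::Error;

#[derive(Error, Debug)]
pub enum DumpErrorSketch {
    // Catch-all for infrastructure failures; callers only see an opaque message.
    #[error("An internal error has occurred. `{0}`.")]
    Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
    // A domain error that keeps its own message.
    #[error("{0}")]
    Io(#[from] std::io::Error),
}

// Roughly what one arm of `internal_error!` would generate: wrap the source
// error and route it into the Internal variant.
impl From<serde_json::Error> for DumpErrorSketch {
    fn from(e: serde_json::Error) -> Self {
        Self::Internal(Box::new(e))
    }
}

fn main() {
    let err: DumpErrorSketch =
        std::io::Error::new(std::io::ErrorKind::Other, "disk full").into();
    println!("{err}");
}
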
@@ -9,11 +9,11 @@ use self::compat::v4_to_v5::CompatV4ToV5;
 use self::compat::v5_to_v6::{CompatIndexV5ToV6, CompatV5ToV6};
 use self::v5::V5Reader;
 use self::v6::{V6IndexReader, V6Reader};
-use crate::{Result, Version};
+use crate::{Error, Result, Version};
 
 mod compat;
 
-pub(self) mod v1;
+// pub(self) mod v1;
 pub(self) mod v2;
 pub(self) mod v3;
 pub(self) mod v4;
@@ -45,9 +45,8 @@ impl DumpReader {
         let MetadataVersion { dump_version } = serde_json::from_reader(&mut meta_file)?;
 
         match dump_version {
-            Version::V1 => {
-                Ok(v1::V1Reader::open(path)?.to_v2().to_v3().to_v4().to_v5().to_v6().into())
-            }
+            // Version::V1 => Ok(Box::new(v1::Reader::open(path)?)),
+            Version::V1 => Err(Error::DumpV1Unsupported),
             Version::V2 => Ok(v2::V2Reader::open(path)?.to_v3().to_v4().to_v5().to_v6().into()),
             Version::V3 => Ok(v3::V3Reader::open(path)?.to_v4().to_v5().to_v6().into()),
             Version::V4 => Ok(v4::V4Reader::open(path)?.to_v5().to_v6().into()),
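// Note (illustration, not part of the diff above): the `Version::V1` arm now
// fails fast with a dedicated `Error::DumpV1Unsupported` instead of running the
// v1 -> v2 -> ... -> v6 conversion chain. A reduced sketch of that dispatch
// shape with simplified stand-in types:

#[derive(Debug)]
enum Version {
    V1,
    V2,
    V3,
}

#[derive(Debug)]
enum Error {
    DumpV1Unsupported,
}

// Stand-in for the boxed readers returned by the real `DumpReader::open`.
type Reader = String;

fn open(version: Version) -> Result<Reader, Error> {
    match version {
        // v1 dumps are rejected up front rather than upgraded.
        Version::V1 => Err(Error::DumpV1Unsupported),
        Version::V2 => Ok("v2 reader upgraded to v6".to_string()),
        Version::V3 => Ok("v3 reader upgraded to v6".to_string()),
    }
}

fn main() {
    assert!(open(Version::V1).is_err());
    assert!(open(Version::V2).is_ok());
    assert!(open(Version::V3).is_ok());
}
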
@@ -190,6 +189,7 @@ pub(crate) mod test {
|
|||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[ignore]
|
||||||
fn import_dump_v5() {
|
fn import_dump_v5() {
|
||||||
let dump = File::open("tests/assets/v5.dump").unwrap();
|
let dump = File::open("tests/assets/v5.dump").unwrap();
|
||||||
let mut dump = DumpReader::open(dump).unwrap();
|
let mut dump = DumpReader::open(dump).unwrap();
|
||||||
@@ -201,7 +201,7 @@ pub(crate) mod test {
|
|||||||
// tasks
|
// tasks
|
||||||
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
|
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
|
||||||
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
|
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"42d4200cf6d92a6449989ca48cd8e28a");
|
||||||
assert_eq!(update_files.len(), 22);
|
assert_eq!(update_files.len(), 22);
|
||||||
assert!(update_files[0].is_none()); // the dump creation
|
assert!(update_files[0].is_none()); // the dump creation
|
||||||
assert!(update_files[1].is_some()); // the enqueued document addition
|
assert!(update_files[1].is_some()); // the enqueued document addition
|
||||||
@@ -231,7 +231,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(products.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"8e5cadabf74aebe1160bf51c3d489efe");
|
||||||
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
|
||||||
@@ -246,7 +246,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"4894ac1e74b9e1069ed5ee262b7a1aca");
|
||||||
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 200);
|
assert_eq!(documents.len(), 200);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
|
||||||
@@ -261,13 +261,14 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(spells.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"054dbf08a79e08bb9becba6f5d090f13");
|
||||||
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[ignore]
|
||||||
fn import_dump_v4() {
|
fn import_dump_v4() {
|
||||||
let dump = File::open("tests/assets/v4.dump").unwrap();
|
let dump = File::open("tests/assets/v4.dump").unwrap();
|
||||||
let mut dump = DumpReader::open(dump).unwrap();
|
let mut dump = DumpReader::open(dump).unwrap();
|
||||||
@@ -299,52 +300,53 @@ pub(crate) mod test {
|
|||||||
assert!(indexes.is_empty());
|
assert!(indexes.is_empty());
|
||||||
|
|
||||||
// products
|
// products
|
||||||
insta::assert_json_snapshot!(products.metadata(), @r###"
|
insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
|
||||||
{
|
{
|
||||||
"uid": "products",
|
"uid": "products",
|
||||||
"primaryKey": "sku",
|
"primaryKey": "sku",
|
||||||
"createdAt": "2022-10-06T12:53:39.360187055Z",
|
"createdAt": "[now]",
|
||||||
"updatedAt": "2022-10-06T12:53:40.603035979Z"
|
"updatedAt": "[now]"
|
||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(products.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"1f9da51a4518166fb440def5437eafdb");
|
||||||
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
|
||||||
|
|
||||||
// movies
|
// movies
|
||||||
insta::assert_json_snapshot!(movies.metadata(), @r###"
|
insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
|
||||||
{
|
{
|
||||||
"uid": "movies",
|
"uid": "movies",
|
||||||
"primaryKey": "id",
|
"primaryKey": "id",
|
||||||
"createdAt": "2022-10-06T12:53:38.710611568Z",
|
"createdAt": "[now]",
|
||||||
"updatedAt": "2022-10-06T12:53:49.785862546Z"
|
"updatedAt": "[now]"
|
||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"488816aba82c1bd65f1609630055c611");
|
||||||
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 110);
|
assert_eq!(documents.len(), 110);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
|
||||||
|
|
||||||
// spells
|
// spells
|
||||||
insta::assert_json_snapshot!(spells.metadata(), @r###"
|
insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
|
||||||
{
|
{
|
||||||
"uid": "dnd_spells",
|
"uid": "dnd_spells",
|
||||||
"primaryKey": "index",
|
"primaryKey": "index",
|
||||||
"createdAt": "2022-10-06T12:53:40.831649057Z",
|
"createdAt": "[now]",
|
||||||
"updatedAt": "2022-10-06T12:53:41.116036186Z"
|
"updatedAt": "[now]"
|
||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(spells.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7b4f66dad597dc651650f35fe34be27f");
|
||||||
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[ignore]
|
||||||
fn import_dump_v3() {
|
fn import_dump_v3() {
|
||||||
let dump = File::open("tests/assets/v3.dump").unwrap();
|
let dump = File::open("tests/assets/v3.dump").unwrap();
|
||||||
let mut dump = DumpReader::open(dump).unwrap();
|
let mut dump = DumpReader::open(dump).unwrap();
|
||||||
@@ -386,7 +388,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(products.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"855f3165dec609b919171ff83f82b364");
|
||||||
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
|
||||||
@@ -401,7 +403,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"43e0bf1746c3ea1d64c1e10ea544c190");
|
||||||
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 110);
|
assert_eq!(documents.len(), 110);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
|
||||||
@@ -416,7 +418,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies2.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"5fd06a5038f49311600379d43412b655");
|
||||||
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 0);
|
assert_eq!(documents.len(), 0);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
|
||||||
@@ -431,13 +433,14 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(spells.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"5fd06a5038f49311600379d43412b655");
|
||||||
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
#[ignore]
|
||||||
fn import_dump_v2() {
|
fn import_dump_v2() {
|
||||||
let dump = File::open("tests/assets/v2.dump").unwrap();
|
let dump = File::open("tests/assets/v2.dump").unwrap();
|
||||||
let mut dump = DumpReader::open(dump).unwrap();
|
let mut dump = DumpReader::open(dump).unwrap();
|
||||||
@@ -479,7 +482,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(products.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"b15b71f56dd082d8e8ec5182e688bf36");
|
||||||
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
|
||||||
@@ -494,7 +497,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"5389153ddf5527fa79c54b6a6e9c21f6");
|
||||||
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 110);
|
assert_eq!(documents.len(), 110);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
|
||||||
@@ -509,7 +512,7 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies2.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"8aebab01301d266acf3e18dd449c008f");
|
||||||
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 0);
|
assert_eq!(documents.len(), 0);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
|
||||||
@@ -524,86 +527,9 @@ pub(crate) mod test {
|
|||||||
}
|
}
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_json_snapshot!(spells.settings().unwrap());
|
meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"8aebab01301d266acf3e18dd449c008f");
|
||||||
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
assert_eq!(documents.len(), 10);
|
assert_eq!(documents.len(), 10);
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn import_dump_v1() {
|
|
||||||
let dump = File::open("tests/assets/v1.dump").unwrap();
|
|
||||||
let mut dump = DumpReader::open(dump).unwrap();
|
|
||||||
|
|
||||||
// top level infos
|
|
||||||
assert_eq!(dump.date(), None);
|
|
||||||
assert_eq!(dump.instance_uid().unwrap(), None);
|
|
||||||
|
|
||||||
// tasks
|
|
||||||
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
|
||||||
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
|
|
||||||
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b3e3652bfc10a76670be157d2507d761");
|
|
||||||
assert_eq!(update_files.len(), 9);
|
|
||||||
assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1
|
|
||||||
|
|
||||||
// keys
|
|
||||||
let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
|
||||||
meili_snap::snapshot!(meili_snap::json_string!(keys), @"[]");
|
|
||||||
meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"d751713988987e9331980363e24189ce");
|
|
||||||
|
|
||||||
// indexes
|
|
||||||
let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
|
||||||
// the index are not ordered in any way by default
|
|
||||||
indexes.sort_by_key(|index| index.metadata().uid.to_string());
|
|
||||||
|
|
||||||
let mut products = indexes.pop().unwrap();
|
|
||||||
let mut movies = indexes.pop().unwrap();
|
|
||||||
let mut spells = indexes.pop().unwrap();
|
|
||||||
assert!(indexes.is_empty());
|
|
||||||
|
|
||||||
// products
|
|
||||||
insta::assert_json_snapshot!(products.metadata(), @r###"
|
|
||||||
{
|
|
||||||
"uid": "products",
|
|
||||||
"primaryKey": "sku",
|
|
||||||
"createdAt": "2022-10-02T13:23:39.976870431Z",
|
|
||||||
"updatedAt": "2022-10-02T13:27:54.353262482Z"
|
|
||||||
}
|
|
||||||
"###);
|
|
||||||
|
|
||||||
insta::assert_json_snapshot!(products.settings().unwrap());
|
|
||||||
let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
|
||||||
assert_eq!(documents.len(), 10);
|
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
|
|
||||||
|
|
||||||
// movies
|
|
||||||
insta::assert_json_snapshot!(movies.metadata(), @r###"
|
|
||||||
{
|
|
||||||
"uid": "movies",
|
|
||||||
"primaryKey": "id",
|
|
||||||
"createdAt": "2022-10-02T13:15:29.477512777Z",
|
|
||||||
"updatedAt": "2022-10-02T13:21:12.671204856Z"
|
|
||||||
}
|
|
||||||
"###);
|
|
||||||
|
|
||||||
insta::assert_json_snapshot!(movies.settings().unwrap());
|
|
||||||
let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
|
||||||
assert_eq!(documents.len(), 10);
|
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b63dbed5bbc059f3e32bc471ae699bf5");
|
|
||||||
|
|
||||||
// spells
|
|
||||||
insta::assert_json_snapshot!(spells.metadata(), @r###"
|
|
||||||
{
|
|
||||||
"uid": "dnd_spells",
|
|
||||||
"primaryKey": "index",
|
|
||||||
"createdAt": "2022-10-02T13:38:26.358882984Z",
|
|
||||||
"updatedAt": "2022-10-02T13:38:26.385609433Z"
|
|
||||||
}
|
|
||||||
"###);
|
|
||||||
|
|
||||||
insta::assert_json_snapshot!(spells.settings().unwrap());
|
|
||||||
let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
|
||||||
assert_eq!(documents.len(), 10);
|
|
||||||
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"aa24c0cfc733d66c396237ad44263bed");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,27 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"typo",
|
|
||||||
"words",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: movies2.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: movies2.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: movies.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [
|
|
||||||
"genres",
|
|
||||||
"id"
|
|
||||||
],
|
|
||||||
"sortableAttributes": [
|
|
||||||
"release_date"
|
|
||||||
],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness",
|
|
||||||
"release_date:asc"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null
|
|
||||||
}
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: spells.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {},
|
|
||||||
"distinctAttribute": null,
|
|
||||||
"typoTolerance": {
|
|
||||||
"enabled": true,
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"oneTypo": 5,
|
|
||||||
"twoTypos": 9
|
|
||||||
},
|
|
||||||
"disableOnWords": [],
|
|
||||||
"disableOnAttributes": []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,48 +0,0 @@
|
|||||||
---
|
|
||||||
source: dump/src/reader/mod.rs
|
|
||||||
expression: products.settings().unwrap()
|
|
||||||
---
|
|
||||||
{
|
|
||||||
"displayedAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"searchableAttributes": [
|
|
||||||
"*"
|
|
||||||
],
|
|
||||||
"filterableAttributes": [],
|
|
||||||
"sortableAttributes": [],
|
|
||||||
"rankingRules": [
|
|
||||||
"words",
|
|
||||||
"typo",
|
|
||||||
"proximity",
|
|
||||||
"attribute",
|
|
||||||
"sort",
|
|
||||||
"exactness"
|
|
||||||
],
|
|
||||||
"stopWords": [],
|
|
||||||
"synonyms": {
|
|
||||||
"android": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"iphone": [
|
|
||||||
"phone",
|
|
||||||
"smartphone"
|
|
||||||
],
|
|
||||||
"phone": [
|
|
||||||
"android",
|
|
||||||
"iphone",
|
|
||||||
"smartphone"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"distinctAttribute": null,
|
|
||||||
"typoTolerance": {
|
|
||||||
"enabled": true,
|
|
||||||
"minWordSizeForTypos": {
|
|
||||||
"oneTypo": 5,
|
|
||||||
"twoTypos": 9
|
|
||||||
},
|
|
||||||
"disableOnWords": [],
|
|
||||||
"disableOnAttributes": []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,40 +0,0 @@
---
source: dump/src/reader/mod.rs
expression: movies.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": ["genres", "id"],
  "sortableAttributes": ["release_date"],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness", "release_date:asc"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  }
}
@@ -1,40 +0,0 @@
---
source: dump/src/reader/mod.rs
expression: spells.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  },
  "faceting": { "maxValuesPerFacet": 100 },
  "pagination": { "maxTotalHits": 1000 }
}
@@ -1,54 +0,0 @@
---
source: dump/src/reader/mod.rs
expression: products.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {
    "android": ["phone", "smartphone"],
    "iphone": ["phone", "smartphone"],
    "phone": ["android", "iphone", "smartphone"]
  },
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  },
  "faceting": { "maxValuesPerFacet": 100 },
  "pagination": { "maxTotalHits": 1000 }
}
@@ -1,46 +0,0 @@
---
source: dump/src/reader/mod.rs
expression: movies.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": ["genres", "id"],
  "sortableAttributes": ["release_date"],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness", "release_date:asc"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  },
  "faceting": { "maxValuesPerFacet": 100 },
  "pagination": { "maxTotalHits": 1000 }
}
@@ -1,262 +1,173 @@
-use std::fs::{self, File};
-use std::io::{BufRead, BufReader};
-use std::path::{Path, PathBuf};
-
-use serde::Deserialize;
+use std::{
+    convert::Infallible,
+    fs::{self, File},
+    io::{BufRead, BufReader},
+    path::Path,
+};
+
 use tempfile::TempDir;
 use time::OffsetDateTime;
 
-use super::compat::v1_to_v2::CompatV1ToV2;
-use super::Document;
-use crate::{IndexMetadata, Result, Version};
+use self::update::UpdateStatus;
+use super::{DumpReader, IndexReader};
+use crate::{Error, Result, Version};
 
 pub mod settings;
 pub mod update;
+pub mod v1;
 
 pub struct V1Reader {
-    pub dump: TempDir,
-    pub db_version: String,
-    pub dump_version: crate::Version,
-    indexes: Vec<V1Index>,
+    dump: TempDir,
+    metadata: v1::Metadata,
+    indexes: Vec<V1IndexReader>,
 }
 
-pub struct IndexUuid {
-    pub name: String,
-    pub uid: String,
-}
-
-pub type Task = self::update::UpdateStatus;
-
-struct V1Index {
-    metadata: IndexMetadataV1,
-    path: PathBuf,
-}
-
-impl V1Index {
-    pub fn new(path: PathBuf, metadata: Index) -> Self {
-        Self { metadata: metadata.into(), path }
-    }
-
-    pub fn open(&self) -> Result<V1IndexReader> {
-        V1IndexReader::new(&self.path, self.metadata.clone())
-    }
-
-    pub fn metadata(&self) -> &IndexMetadata {
-        &self.metadata.metadata
-    }
-}
-
-pub struct V1IndexReader {
-    metadata: IndexMetadataV1,
+struct V1IndexReader {
+    name: String,
     documents: BufReader<File>,
     settings: BufReader<File>,
     updates: BufReader<File>,
+
+    current_update: Option<UpdateStatus>,
 }
 
 impl V1IndexReader {
-    pub fn new(path: &Path, metadata: IndexMetadataV1) -> Result<Self> {
-        Ok(V1IndexReader {
-            metadata,
+    pub fn new(name: String, path: &Path) -> Result<Self> {
+        let mut ret = V1IndexReader {
+            name,
             documents: BufReader::new(File::open(path.join("documents.jsonl"))?),
             settings: BufReader::new(File::open(path.join("settings.json"))?),
             updates: BufReader::new(File::open(path.join("updates.jsonl"))?),
-        })
-    }
-
-    pub fn metadata(&self) -> &IndexMetadata {
-        &self.metadata.metadata
-    }
-
-    pub fn documents(&mut self) -> Result<impl Iterator<Item = Result<Document>> + '_> {
-        Ok((&mut self.documents)
-            .lines()
-            .map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }))
-    }
-
-    pub fn settings(&mut self) -> Result<self::settings::Settings> {
-        Ok(serde_json::from_reader(&mut self.settings)?)
-    }
-
-    pub fn tasks(self) -> impl Iterator<Item = Result<Task>> {
-        self.updates.lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) })
+            current_update: None,
+        };
+        ret.next_update();
+
+        Ok(ret)
+    }
+
+    pub fn next_update(&mut self) -> Result<Option<UpdateStatus>> {
+        let current_update = if let Some(line) = self.updates.lines().next() {
+            Some(serde_json::from_str(&line?)?)
+        } else {
+            None
+        };
+
+        Ok(std::mem::replace(&mut self.current_update, current_update))
     }
 }
 
 impl V1Reader {
     pub fn open(dump: TempDir) -> Result<Self> {
-        let meta_file = fs::read(dump.path().join("metadata.json"))?;
-        let metadata: Metadata = serde_json::from_reader(&*meta_file)?;
+        let mut meta_file = fs::read(dump.path().join("metadata.json"))?;
+        let metadata = serde_json::from_reader(&*meta_file)?;
 
         let mut indexes = Vec::new();
 
-        for index in metadata.indexes.into_iter() {
-            let index_path = dump.path().join(&index.uid);
-            indexes.push(V1Index::new(index_path, index));
+        let entries = fs::read_dir(dump.path())?;
+        for entry in entries {
+            let entry = entry?;
+            if entry.file_type()?.is_dir() {
+                indexes.push(V1IndexReader::new(
+                    entry
+                        .file_name()
+                        .to_str()
+                        .ok_or(Error::BadIndexName)?
+                        .to_string(),
+                    &entry.path(),
+                )?);
+            }
         }
 
         Ok(V1Reader {
             dump,
+            metadata,
             indexes,
-            db_version: metadata.db_version,
-            dump_version: metadata.dump_version,
         })
     }
 
-    pub fn to_v2(self) -> CompatV1ToV2 {
-        CompatV1ToV2 { from: self }
-    }
-
-    pub fn index_uuid(&self) -> Vec<IndexUuid> {
-        self.indexes
-            .iter()
-            .map(|index| IndexUuid {
-                name: index.metadata.name.to_owned(),
-                uid: index.metadata().uid.to_owned(),
-            })
-            .collect()
-    }
-
-    pub fn version(&self) -> Version {
-        Version::V1
-    }
-
-    pub fn date(&self) -> Option<OffsetDateTime> {
-        None
-    }
-
-    pub fn indexes(&self) -> Result<impl Iterator<Item = Result<V1IndexReader>> + '_> {
-        Ok(self.indexes.iter().map(|index| index.open()))
-    }
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Index {
-    pub name: String,
-    pub uid: String,
-    #[serde(with = "time::serde::rfc3339")]
-    created_at: OffsetDateTime,
-    #[serde(with = "time::serde::rfc3339")]
-    updated_at: OffsetDateTime,
-    pub primary_key: Option<String>,
-}
-
-#[derive(Clone)]
-pub struct IndexMetadataV1 {
-    pub name: String,
-    pub metadata: crate::IndexMetadata,
-}
-
-impl From<Index> for IndexMetadataV1 {
-    fn from(index: Index) -> Self {
-        IndexMetadataV1 {
-            name: index.name,
-            metadata: crate::IndexMetadata {
-                uid: index.uid,
-                primary_key: index.primary_key,
-                created_at: index.created_at,
-                updated_at: index.updated_at,
-            },
-        }
-    }
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(rename_all = "camelCase")]
-pub struct Metadata {
-    pub indexes: Vec<Index>,
-    pub db_version: String,
-    pub dump_version: crate::Version,
-}
-
-#[cfg(test)]
-pub(crate) mod test {
-    use std::fs::File;
-    use std::io::BufReader;
-
-    use flate2::bufread::GzDecoder;
-    use meili_snap::insta;
-    use tempfile::TempDir;
-
-    use super::*;
-
-    #[test]
-    fn read_dump_v1() {
-        let dump = File::open("tests/assets/v1.dump").unwrap();
-        let dir = TempDir::new().unwrap();
-        let mut dump = BufReader::new(dump);
-        let gz = GzDecoder::new(&mut dump);
-        let mut archive = tar::Archive::new(gz);
-        archive.unpack(dir.path()).unwrap();
-
-        let dump = V1Reader::open(dir).unwrap();
-
-        // top level infos
-        assert_eq!(dump.date(), None);
-
-        // indexes
-        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
-
-        let mut products = indexes.pop().unwrap();
-        let mut movies = indexes.pop().unwrap();
-        let mut dnd_spells = indexes.pop().unwrap();
-
-        assert!(indexes.is_empty());
-
-        // products
-        insta::assert_json_snapshot!(products.metadata(), @r###"{ "uid": "products", "primaryKey": "sku", "createdAt": "2022-10-02T13:23:39.976870431Z", "updatedAt": "2022-10-02T13:27:54.353262482Z" }"###);
-
-        insta::assert_json_snapshot!(products.settings().unwrap());
-        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
-
-        // products tasks
-        let tasks = products.tasks().collect::<Result<Vec<_>>>().unwrap();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"91de507f206ad21964584021932ba7a7");
-
-        // movies
-        insta::assert_json_snapshot!(movies.metadata(), @r###"{ "uid": "movies", "primaryKey": "id", "createdAt": "2022-10-02T13:15:29.477512777Z", "updatedAt": "2022-10-02T13:21:12.671204856Z" }"###);
-
-        insta::assert_json_snapshot!(movies.settings().unwrap());
-        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b63dbed5bbc059f3e32bc471ae699bf5");
-
-        // movies tasks
-        let tasks = movies.tasks().collect::<Result<Vec<_>>>().unwrap();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"55eef4de2bef7e84c5ce0bee47488f56");
-
-        // spells
-        insta::assert_json_snapshot!(dnd_spells.metadata(), @r###"{ "uid": "dnd_spells", "primaryKey": "index", "createdAt": "2022-10-02T13:38:26.358882984Z", "updatedAt": "2022-10-02T13:38:26.385609433Z" }"###);
-
-        insta::assert_json_snapshot!(dnd_spells.settings().unwrap());
-        let documents = dnd_spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"aa24c0cfc733d66c396237ad44263bed");
-
-        // spells tasks
-        let tasks = dnd_spells.tasks().collect::<Result<Vec<_>>>().unwrap();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"836dd7d64d5ad20ad901c44b1b161a4c");
-    }
-}
+    fn next_update(&mut self) -> Result<Option<UpdateStatus>> {
+        if let Some((idx, _)) = self
+            .indexes
+            .iter()
+            .map(|index| index.current_update)
+            .enumerate()
+            .filter_map(|(idx, update)| update.map(|u| (idx, u)))
+            .min_by_key(|(_, update)| update.enqueued_at())
+        {
+            self.indexes[idx].next_update()
+        } else {
+            Ok(None)
+        }
+    }
+}
+
+impl IndexReader for &V1IndexReader {
+    type Document = serde_json::Map<String, serde_json::Value>;
+    type Settings = settings::Settings;
+
+    fn name(&self) -> &str {
+        todo!()
+    }
+
+    fn documents(&self) -> Result<Box<dyn Iterator<Item = Result<Self::Document>>>> {
+        todo!()
+    }
+
+    fn settings(&self) -> Result<Self::Settings> {
+        todo!()
+    }
+}
+
+impl DumpReader for V1Reader {
+    type Document = serde_json::Map<String, serde_json::Value>;
+    type Settings = settings::Settings;
+
+    type Task = update::UpdateStatus;
+    type UpdateFile = Infallible;
+
+    type Key = Infallible;
+
+    fn date(&self) -> Option<OffsetDateTime> {
+        None
+    }
+
+    fn version(&self) -> Version {
+        Version::V1
+    }
+
+    fn indexes(
+        &self,
+    ) -> Result<
+        Box<
+            dyn Iterator<
+                Item = Result<
+                    Box<
+                        dyn super::IndexReader<
+                            Document = Self::Document,
+                            Settings = Self::Settings,
+                        >,
+                    >,
+                >,
+            >,
+        >,
+    > {
+        Ok(Box::new(self.indexes.iter().map(|index| {
+            let index = Box::new(index)
+                as Box<dyn IndexReader<Document = Self::Document, Settings = Self::Settings>>;
+            Ok(index)
+        })))
+    }
+
+    fn tasks(&self) -> Box<dyn Iterator<Item = Result<(Self::Task, Option<Self::UpdateFile>)>>> {
+        Box::new(std::iter::from_fn(|| {
+            self.next_update()
+                .transpose()
+                .map(|result| result.map(|task| (task, None)))
+        }))
+    }
+
+    fn keys(&self) -> Box<dyn Iterator<Item = Result<Self::Key>>> {
+        Box::new(std::iter::empty())
+    }
+}
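For orientation, the `-` side of this hunk is the reader with a fully public API: `V1Reader::open` takes an unpacked dump directory and hands back per-index readers exposing `metadata()`, `documents()`, `settings()` and `tasks()`. The sketch below shows how that side might be driven, mirroring the removed test; it assumes that removed API shape (not the surviving one) and simplifies error handling.

use tempfile::TempDir;

// Assumes `dir` already contains an unpacked v1 dump, as in the removed test.
fn walk_dump(dir: TempDir) -> Result<(), Box<dyn std::error::Error>> {
    let dump = V1Reader::open(dir)?;
    for index in dump.indexes()? {
        let mut index = index?;
        println!("index {}", index.metadata().uid);
        // Documents are streamed line by line from documents.jsonl.
        let documents: Vec<_> = index.documents()?.collect::<Result<_, _>>()?;
        println!("  {} documents", documents.len());
        // tasks() consumes the reader and streams updates.jsonl.
        for task in index.tasks() {
            let _task = task?;
        }
    }
    Ok(())
}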
@@ -1,9 +1,6 @@
 use std::collections::{BTreeMap, BTreeSet};
 use std::result::Result as StdResult;
-use std::str::FromStr;
 
-use once_cell::sync::Lazy;
-use regex::Regex;
 use serde::{Deserialize, Deserializer, Serialize};
 
 #[derive(Default, Clone, Serialize, Deserialize, Debug)]
@@ -56,34 +53,6 @@ pub enum RankingRule {
     Desc(String),
 }
 
-static ASC_DESC_REGEX: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap());
-
-impl FromStr for RankingRule {
-    type Err = ();
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        Ok(match s {
-            "typo" => Self::Typo,
-            "words" => Self::Words,
-            "proximity" => Self::Proximity,
-            "attribute" => Self::Attribute,
-            "wordsPosition" => Self::WordsPosition,
-            "exactness" => Self::Exactness,
-            text => {
-                let caps = ASC_DESC_REGEX.captures(text).ok_or(())?;
-                let order = caps.get(1).unwrap().as_str();
-                let field_name = caps.get(2).unwrap().as_str();
-                match order {
-                    "asc" => Self::Asc(field_name.to_string()),
-                    "desc" => Self::Desc(field_name.to_string()),
-                    _ => return Err(()),
-                }
-            }
-        })
-    }
-}
-
 // Any value that is present is considered Some value, including null.
 fn deserialize_some<'de, T, D>(deserializer: D) -> StdResult<Option<T>, D::Error>
 where
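The `FromStr` impl removed above parses the legacy `asc(field)` / `desc(field)` ranking-rule syntax with a regex. A minimal standalone sketch of the same idea follows; the `Rule` enum and function names here are illustrative, not the crate's own API.

use once_cell::sync::Lazy;
use regex::Regex;

#[derive(Debug, PartialEq)]
enum Rule { Asc(String), Desc(String) }

// Matches "asc(field)" or "desc(field)"; the field may contain word chars, `_` and `-`.
static ASC_DESC: Lazy<Regex> = Lazy::new(|| Regex::new(r"(asc|desc)\(([\w_-]+)\)").unwrap());

fn parse(text: &str) -> Option<Rule> {
    let caps = ASC_DESC.captures(text)?;
    let field = caps.get(2)?.as_str().to_string();
    match caps.get(1)?.as_str() {
        "asc" => Some(Rule::Asc(field)),
        "desc" => Some(Rule::Desc(field)),
        _ => None,
    }
}

// parse("asc(release_date)") == Some(Rule::Asc("release_date".into()))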
@@ -1,24 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: dnd_spells.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {},
  "attributesForFaceting": []
}
@@ -1,24 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: dnd_spells.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {},
  "attributesForFaceting": []
}
@@ -1,38 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: products.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {
    "android": ["phone", "smartphone"],
    "iphone": ["phone", "smartphone"],
    "phone": ["android", "iphone", "smartphone"]
  },
  "attributesForFaceting": []
}
@@ -1,28 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: movies.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness", "asc(release_date)"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {},
  "attributesForFaceting": ["id", "genres"]
}
@@ -1,28 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: movies.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness", "asc(release_date)"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {},
  "attributesForFaceting": ["id", "genres"]
}
@@ -1,28 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: movies.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness", "asc(release_date)"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {},
  "attributesForFaceting": ["id", "genres"]
}
@@ -1,24 +0,0 @@
---
source: dump/src/reader/v1/mod.rs
expression: dnd_spells.settings().unwrap()
---
{
  "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"],
  "distinctAttribute": null,
  "searchableAttributes": ["*"],
  "displayedAttributes": ["*"],
  "stopWords": [],
  "synonyms": {},
  "attributesForFaceting": []
}
@@ -1,8 +1,54 @@
 use serde::{Deserialize, Serialize};
+use serde_json::Value;
 use time::OffsetDateTime;
 
 use super::settings::SettingsUpdate;
 
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Update {
+    data: UpdateData,
+    #[serde(with = "time::serde::rfc3339")]
+    enqueued_at: OffsetDateTime,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub enum UpdateData {
+    ClearAll,
+    Customs(Vec<u8>),
+    // (primary key, documents)
+    DocumentsAddition {
+        primary_key: Option<String>,
+        documents: Vec<serde_json::Map<String, Value>>,
+    },
+    DocumentsPartial {
+        primary_key: Option<String>,
+        documents: Vec<serde_json::Map<String, Value>>,
+    },
+    DocumentsDeletion(Vec<String>),
+    Settings(Box<SettingsUpdate>),
+}
+
+impl UpdateData {
+    pub fn update_type(&self) -> UpdateType {
+        match self {
+            UpdateData::ClearAll => UpdateType::ClearAll,
+            UpdateData::Customs(_) => UpdateType::Customs,
+            UpdateData::DocumentsAddition { documents, .. } => UpdateType::DocumentsAddition {
+                number: documents.len(),
+            },
+            UpdateData::DocumentsPartial { documents, .. } => UpdateType::DocumentsPartial {
+                number: documents.len(),
+            },
+            UpdateData::DocumentsDeletion(deletion) => UpdateType::DocumentsDeletion {
+                number: deletion.len(),
+            },
+            UpdateData::Settings(update) => UpdateType::Settings {
+                settings: update.clone(),
+            },
+        }
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[serde(tag = "name")]
 pub enum UpdateType {
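The `Update` and `UpdateData` types added above are deserialized from the dump's `updates.jsonl`, one JSON object per line. A minimal sketch of that read loop, kept generic by parsing into `serde_json::Value` rather than claiming the exact field layout; `read_updates` and its signature are illustrative only.

use std::fs::File;
use std::io::{BufRead, BufReader};

fn read_updates(path: &str) -> std::io::Result<Vec<serde_json::Value>> {
    // Each line of updates.jsonl is an independent JSON document describing one update.
    let reader = BufReader::new(File::open(path)?);
    let mut updates = Vec::new();
    for line in reader.lines() {
        let value: serde_json::Value =
            serde_json::from_str(&line?).expect("updates.jsonl line is not valid JSON");
        updates.push(value);
    }
    Ok(updates)
}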
22  dump/src/reader/v1/v1.rs  Normal file
@@ -0,0 +1,22 @@
+use serde::Deserialize;
+use time::OffsetDateTime;
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Index {
+    pub name: String,
+    pub uid: String,
+    #[serde(with = "time::serde::rfc3339")]
+    created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    updated_at: OffsetDateTime,
+    pub primary_key: Option<String>,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct Metadata {
+    indexes: Vec<Index>,
+    db_version: String,
+    dump_version: crate::Version,
+}
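The new `Metadata` struct mirrors the `metadata.json` file at the root of a v1 dump. A small sketch of loading it with `serde_json`; the file location is taken from the reader code above, and error handling is simplified.

use std::fs::File;
use std::path::Path;

// `Metadata` stands for the struct introduced in v1.rs above.
fn read_metadata(dump_dir: &Path) -> Result<Metadata, Box<dyn std::error::Error>> {
    let file = File::open(dump_dir.join("metadata.json"))?;
    // serde_json maps the camelCase keys thanks to `rename_all`.
    let metadata: Metadata = serde_json::from_reader(file)?;
    Ok(metadata)
}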
@@ -211,6 +211,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn read_dump_v2() {
         let dump = File::open("tests/assets/v2.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -256,7 +257,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"c41bf7315d404da46c99b9e3a2a3cc1e");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@@ -271,7 +272,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"3d1d96c85b6bab46e957bc8d2532a910");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@@ -286,7 +287,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies2.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"4f04afc086828d8da0da57a7d598ddba");
         let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@@ -301,7 +302,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"4f04afc086828d8da0da57a7d598ddba");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
@@ -1,5 +1,4 @@
 use std::collections::{BTreeMap, BTreeSet};
-use std::fmt::Display;
 use std::marker::PhantomData;
 use std::str::FromStr;
 
@@ -175,17 +174,3 @@ impl FromStr for Criterion {
         }
     }
 }
-
-impl Display for Criterion {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Criterion::Words => write!(f, "words"),
-            Criterion::Typo => write!(f, "typo"),
-            Criterion::Proximity => write!(f, "proximity"),
-            Criterion::Attribute => write!(f, "attribute"),
-            Criterion::Exactness => write!(f, "exactness"),
-            Criterion::Asc(field_name) => write!(f, "asc({})", field_name),
-            Criterion::Desc(field_name) => write!(f, "desc({})", field_name),
-        }
-    }
-}
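The hunk above drops a `Display` impl that rendered each criterion back to its textual form. As a standalone illustration of the pattern (on a simplified enum, not the crate's `Criterion`), such a `Display` impl pairs naturally with a `FromStr` parser so values round-trip through their string representation.

use std::fmt;

#[derive(Debug, PartialEq)]
enum Rule {
    Words,
    Typo,
    Asc(String),
}

impl fmt::Display for Rule {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Rule::Words => write!(f, "words"),
            Rule::Typo => write!(f, "typo"),
            Rule::Asc(field) => write!(f, "asc({})", field),
        }
    }
}

// Rule::Asc("release_date".into()).to_string() == "asc(release_date)"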
@@ -1,23 +0,0 @@
---
source: dump/src/reader/v2/mod.rs
expression: movies2.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null
}
@@ -1,23 +0,0 @@
---
source: dump/src/reader/v2/mod.rs
expression: spells.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null
}
@@ -1,37 +0,0 @@
---
source: dump/src/reader/v2/mod.rs
expression: products.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"],
  "stopWords": [],
  "synonyms": {
    "android": ["phone", "smartphone"],
    "iphone": ["phone", "smartphone"],
    "phone": ["android", "iphone", "smartphone"]
  },
  "distinctAttribute": null
}
@@ -1,24 +0,0 @@
---
source: dump/src/reader/v2/mod.rs
expression: movies.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "exactness", "asc(release_date)"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null
}
@@ -227,6 +227,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn read_dump_v3() {
         let dump = File::open("tests/assets/v3.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -272,7 +273,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"f309b009608cc0b770b2f74516f92647");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
@@ -287,7 +288,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"95dff22ba3a7019616c12df9daa35e1e");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d153b5a81d8b3cdcbe1dec270b574022");
@@ -302,7 +303,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies2.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies2.settings()), @"1dafc4b123e3a8e14a889719cc01f6e5");
         let documents = movies2.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 0);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");
@@ -317,7 +318,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"1dafc4b123e3a8e14a889719cc01f6e5");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
@@ -1,25 +0,0 @@
---
source: dump/src/reader/v3/mod.rs
expression: movies2.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null
}
@@ -1,25 +0,0 @@
---
source: dump/src/reader/v3/mod.rs
expression: spells.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null
}
@@ -1,39 +0,0 @@
---
source: dump/src/reader/v3/mod.rs
expression: products.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {
    "android": ["phone", "smartphone"],
    "iphone": ["phone", "smartphone"],
    "phone": ["android", "iphone", "smartphone"]
  },
  "distinctAttribute": null
}
@@ -1,31 +0,0 @@
---
source: dump/src/reader/v3/mod.rs
expression: movies.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": ["genres", "id"],
  "sortableAttributes": ["release_date"],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness", "release_date:asc"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null
}
@@ -13,7 +13,7 @@ pub mod meta;
 pub mod settings;
 pub mod tasks;
 
-use self::meta::{DumpMeta, IndexMeta, IndexUuid};
+use self::meta::{DumpMeta, IndexUuid};
 use super::compat::v4_to_v5::CompatV4ToV5;
 use crate::{Error, IndexMetadata, Result, Version};
 
@@ -100,10 +100,6 @@ impl V4Reader {
             V4IndexReader::new(
                 index.uid.clone(),
                 &self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
-                &index.index_meta,
-                BufReader::new(
-                    File::open(self.dump.path().join("updates").join("data.jsonl")).unwrap(),
-                ),
             )
         }))
     }
@@ -151,44 +147,16 @@ pub struct V4IndexReader {
 }
 
 impl V4IndexReader {
-    pub fn new(
-        name: String,
-        path: &Path,
-        index_metadata: &IndexMeta,
-        tasks: BufReader<File>,
-    ) -> Result<Self> {
+    pub fn new(name: String, path: &Path) -> Result<Self> {
         let meta = File::open(path.join("meta.json"))?;
         let meta: DumpMeta = serde_json::from_reader(meta)?;
 
-        let mut created_at = None;
-        let mut updated_at = None;
-
-        for line in tasks.lines() {
-            let task: Task = serde_json::from_str(&line?)?;
-
-            if task.index_uid.to_string() == name {
-                // The first task to match our index_uid that succeeded (ie. processed_at returns Some)
-                // is our `last_updated_at`.
-                if updated_at.is_none() {
-                    updated_at = task.processed_at()
-                }
-
-                // Once we reach the `creation_task_id` we can stop iterating on the task queue and
-                // this task represents our `created_at`.
-                if task.id as usize == index_metadata.creation_task_id {
-                    created_at = task.created_at();
-                    break;
-                }
-            }
-        }
-
-        let current_time = OffsetDateTime::now_utc();
-
         let metadata = IndexMetadata {
             uid: name,
             primary_key: meta.primary_key,
-            created_at: created_at.unwrap_or(current_time),
-            updated_at: updated_at.unwrap_or(current_time),
+            // FIXME: Iterate over the whole task queue to find the creation and last update date.
+            created_at: OffsetDateTime::now_utc(),
+            updated_at: OffsetDateTime::now_utc(),
         };
 
         let ret = V4IndexReader {
@@ -251,6 +219,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn read_dump_v4() {
         let dump = File::open("tests/assets/v4.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -291,46 +260,46 @@ pub(crate) mod test {
         assert!(indexes.is_empty());
 
         // products
-        insta::assert_json_snapshot!(products.metadata(), @r###"
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
         {
           "uid": "products",
           "primaryKey": "sku",
-          "createdAt": "2022-10-06T12:53:39.360187055Z",
-          "updatedAt": "2022-10-06T12:53:40.603035979Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"65b139c6b9fc251e187073c8557803e2");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
 
         // movies
-        insta::assert_json_snapshot!(movies.metadata(), @r###"
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
         {
           "uid": "movies",
           "primaryKey": "id",
-          "createdAt": "2022-10-06T12:53:38.710611568Z",
-          "updatedAt": "2022-10-06T12:53:49.785862546Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"06aa1988493485d9b2cda7c751e6bb15");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 110);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"786022a66ecb992c8a2a60fee070a5ab");
 
         // spells
-        insta::assert_json_snapshot!(spells.metadata(), @r###"
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
         {
           "uid": "dnd_spells",
           "primaryKey": "index",
-          "createdAt": "2022-10-06T12:53:40.831649057Z",
-          "updatedAt": "2022-10-06T12:53:41.116036186Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"7d722fc2629eaa45032ed3deb0c9b4ce");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
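The `-` side of the V4IndexReader hunk derives an index's `createdAt`/`updatedAt` from the dump's task queue instead of stamping "now". A compressed sketch of that idea on a simplified task type (the names and fields below are illustrative, not the dump format): the first matching finished task supplies the last update date, and the index-creation task supplies the creation date, as the removed comments describe.

use time::OffsetDateTime;

// Illustrative stand-in for one entry of the dump's task queue.
struct TaskEntry {
    id: usize,
    index_uid: String,
    finished_at: Option<OffsetDateTime>,
    created_at: OffsetDateTime,
}

fn index_dates(
    tasks: &[TaskEntry],
    index_uid: &str,
    creation_task_id: usize,
) -> (Option<OffsetDateTime>, Option<OffsetDateTime>) {
    let mut created_at = None;
    let mut updated_at = None;
    for task in tasks.iter().filter(|t| t.index_uid == index_uid) {
        // First finished task for this index is taken as the last update.
        if updated_at.is_none() {
            updated_at = task.finished_at;
        }
        // The creation task marks the creation date; nothing older matters.
        if task.id == creation_task_id {
            created_at = Some(task.created_at);
            break;
        }
    }
    (created_at, updated_at)
}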
@@ -1,40 +0,0 @@
---
source: dump/src/reader/v4/mod.rs
expression: movies.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": ["genres", "id"],
  "sortableAttributes": ["release_date"],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness", "release_date:asc"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  }
}
@@ -1,34 +0,0 @@
---
source: dump/src/reader/v4/mod.rs
expression: spells.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {},
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  }
}
@@ -1,48 +0,0 @@
---
source: dump/src/reader/v4/mod.rs
expression: products.settings().unwrap()
---
{
  "displayedAttributes": ["*"],
  "searchableAttributes": ["*"],
  "filterableAttributes": [],
  "sortableAttributes": [],
  "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
  "stopWords": [],
  "synonyms": {
    "android": ["phone", "smartphone"],
    "iphone": ["phone", "smartphone"],
    "phone": ["android", "iphone", "smartphone"]
  },
  "distinctAttribute": null,
  "typoTolerance": {
    "enabled": true,
    "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 },
    "disableOnWords": [],
    "disableOnAttributes": []
  }
}
@@ -104,41 +104,6 @@ impl Task {
         })
     }
 
-    pub fn processed_at(&self) -> Option<OffsetDateTime> {
-        match self.events.last() {
-            Some(TaskEvent::Succeded { result: _, timestamp }) => Some(*timestamp),
-            _ => None,
-        }
-    }
-
-    pub fn created_at(&self) -> Option<OffsetDateTime> {
-        match &self.content {
-            TaskContent::IndexCreation { primary_key: _ } => match self.events.first() {
-                Some(TaskEvent::Created(ts)) => Some(*ts),
-                _ => None,
-            },
-            TaskContent::DocumentAddition {
-                content_uuid: _,
-                merge_strategy: _,
-                primary_key: _,
-                documents_count: _,
-                allow_index_creation: _,
-            } => match self.events.first() {
-                Some(TaskEvent::Created(ts)) => Some(*ts),
-                _ => None,
-            },
-            TaskContent::SettingsUpdate {
-                settings: _,
-                is_deletion: _,
-                allow_index_creation: _,
-            } => match self.events.first() {
-                Some(TaskEvent::Created(ts)) => Some(*ts),
-                _ => None,
-            },
-            _ => None,
-        }
-    }
-
     /// Return the content_uuid of the `Task` if there is one.
     pub fn get_content_uuid(&self) -> Option<Uuid> {
         match self {
@@ -261,6 +261,7 @@ pub(crate) mod test {
     use super::*;
 
     #[test]
+    #[ignore]
     fn read_dump_v5() {
         let dump = File::open("tests/assets/v5.dump").unwrap();
         let dir = TempDir::new().unwrap();
@@ -311,7 +312,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(products.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", products.settings()), @"b392b928dab63468318b2bdaad844c5a");
         let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
@@ -326,7 +327,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(movies.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", movies.settings()), @"2f881248b7c3623e2ba2885dbf0b2c18");
         let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 200);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
@@ -341,7 +342,7 @@ pub(crate) mod test {
         }
         "###);
 
-        insta::assert_json_snapshot!(spells.settings().unwrap());
+        meili_snap::snapshot_hash!(format!("{:#?}", spells.settings()), @"ade154e63ab713de67919892917d3d9d");
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
@@ -1,74 +0,0 @@
---
source: dump/src/reader/v5/mod.rs
expression: movies.settings().unwrap()
---
{
  "displayedAttributes": "Reset",
  "searchableAttributes": "Reset",
  "filterableAttributes": { "Set": ["genres", "id"] },
  "sortableAttributes": { "Set": ["release_date"] },
  "rankingRules": { "Set": ["words", "typo", "proximity", "attribute", "sort", "exactness", "release_date:asc"] },
  "stopWords": { "Set": [] },
  "synonyms": { "Set": {} },
  "distinctAttribute": "Reset",
  "typoTolerance": {
    "Set": {
      "enabled": { "Set": true },
      "minWordSizeForTypos": { "Set": { "oneTypo": { "Set": 5 }, "twoTypos": { "Set": 9 } } },
      "disableOnWords": { "Set": [] },
      "disableOnAttributes": { "Set": [] }
    }
  },
  "faceting": { "Set": { "maxValuesPerFacet": { "Set": 100 } } },
  "pagination": { "Set": { "maxTotalHits": { "Set": 1000 } } }
}
@@ -1,68 +0,0 @@
---
source: dump/src/reader/v5/mod.rs
expression: spells.settings().unwrap()
---
{
  "displayedAttributes": "Reset",
  "searchableAttributes": "Reset",
  "filterableAttributes": { "Set": [] },
  "sortableAttributes": { "Set": [] },
  "rankingRules": { "Set": ["words", "typo", "proximity", "attribute", "sort", "exactness"] },
  "stopWords": { "Set": [] },
  "synonyms": { "Set": {} },
  "distinctAttribute": "Reset",
  "typoTolerance": {
    "Set": {
      "enabled": { "Set": true },
      "minWordSizeForTypos": { "Set": { "oneTypo": { "Set": 5 }, "twoTypos": { "Set": 9 } } },
      "disableOnWords": { "Set": [] },
      "disableOnAttributes": { "Set": [] }
    }
  },
  "faceting": { "Set": { "maxValuesPerFacet": { "Set": 100 } } },
  "pagination": { "Set": { "maxTotalHits": { "Set": 1000 } } }
}
@@ -1,82 +0,0 @@
---
source: dump/src/reader/v5/mod.rs
expression: products.settings().unwrap()
---
{
  "displayedAttributes": "Reset",
  "searchableAttributes": "Reset",
  "filterableAttributes": { "Set": [] },
  "sortableAttributes": { "Set": [] },
  "rankingRules": { "Set": ["words", "typo", "proximity", "attribute", "sort", "exactness"] },
  "stopWords": { "Set": [] },
  "synonyms": {
    "Set": {
      "android": ["phone", "smartphone"],
      "iphone": ["phone", "smartphone"],
      "phone": ["android", "iphone", "smartphone"]
    }
  },
  "distinctAttribute": "Reset",
  "typoTolerance": {
    "Set": {
      "enabled": { "Set": true },
      "minWordSizeForTypos": { "Set": { "oneTypo": { "Set": 5 }, "twoTypos": { "Set": 9 } } },
      "disableOnWords": { "Set": [] },
      "disableOnAttributes": { "Set": [] }
    }
  },
  "faceting": { "Set": { "maxValuesPerFacet": { "Set": 100 } } },
  "pagination": { "Set": { "maxTotalHits": { "Set": 1000 } } }
}
Some files were not shown because too many files have changed in this diff.