Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-11-22 20:56:04 +00:00
Compare commits (236 commits): prototype-...paralleliz
SHA1: c4cedb5390, 0b40892eca, 7ddd8a2b66, 2addedf98a, a608e57c3c, 398efa3c55, 307ea38c2a, cdeca59587,
8529e2161a, b80869f2be, 666ae1a3e7, f6559258ce, b5ba0e42b3, 300f5ce0f4, 3240d89e81, 63a649fd7d,
b9e014c044, 9021cb4258, de52fe91f5, 97ecbb53ff, 14b1a3300b, f4903c2fe7, 3d271c25c7, 3c84010403,
a69af611e3, 1f126a2d8a, c5b325de30, 100a6f96e4, 3c583ce7a4, 0881810780, b7f32c5acd, 8f04529ba2,
54b85b8644, 562c620fec, 68280bad9e, 33bc86d71a, 3f20c1aa5d, b265c92852, 759beed560, 5df125cbb7,
74992560b0, 2035f342f0, 27fed758c2, 3ead985caf, e302e9edd3, 1fdf820931, b4f2eeac0a, 7e3f2ab0c6,
899be9c3ff, 444231e812, 1ff6da63e8, b5158e1e83, 3f1e172c6f, 2b5b41790e, 55cd3203fe, c385cf985b,
45bb13bf43, 095cba8fba, 2121819c66, 2f33cd5f0a, 2f5101a1e4, 3a9b08960a, c4e7bf2e60, 4f6a48c327,
4c61a227ca, 3d2c204f2d, 8b27dec25c, a9c924b433, 6cb2296644, b2d157a74a, 386cf83285, 8ef1a50086,
84651ffd7d, 43c20bb3ed, d340013d8b, 8a44d9faef, afb367c7f4, 84bcf9785f, fc814b7537, 0865d8af6c,
cac884401f, 7251cccd03, ddfcacbb62, 3b26d64a5d, 3b0f576d56, 454f8b36f4, 1754745c42, 6f30dfa41c,
33350248c8, 69c59d3de3, 8dfebbb3e7, 737ad3ec19, 4ec4710811, c5caac95dd, 7acbb1e140, a5e5afd123,
c70e9abf70, f8d70249a7, a2c96d40d3, 05dd8e0d62, 4182e631d6, ddea0b1570, beb532e2a7, be045a7636,
e3a6d63b52, 6f8c414a75, 2ec80a1ae2, ed147f80ac, c37ed05f49, c1a5a545b6, 35537e0b0b, ee80fc87c9,
bb43bf122e, 34590297c1, 9e43f7b419, 94733a4a18, ad68245186, 29fb4d5e2a, ca27bcaac7, 53397e28fc,
7c2c17129f, 446fce6c16, a99538cd5f, f67043801b, 941da56ee3, 41262b008b, fc4c5d2718, a75b327b37,
c70ae91d34, e88480c7c4, b565ec1497, 3e77c1d8c8, dc7af47371, 064d9d5ff8, 93f8b31eec, 466e1a7aac,
cc37eb870f, 5567653c96, 5e867f7ce0, 60acdf8574, 93864009cc, 223df5a433, 3580b3a4ef, 66b6e47494,
6c3dd83ae5, 48a5f4db2d, 224892e692, 691a9ae4b1, e8a818f53d, 478f374b9d, 10567b150c, a439f57d70,
d243504296, a7fe2abca4, 26da478b5b, 13d38d59bf, 4264abda23, dbb670a9ee, a92e36ab83, ad06828685,
8f1b697b91, bb4d573862, aa5a1f333a, 1f18f0ba77, 44b24652d2, 5dcf79233e, 846d27354b, c1aa4120ac,
6394efc4c2, 9716834380, 2f2e42e72d, 776e55d209, 3362fb8476, 6d93b36279, 982e989886, 0014ed3114,
ab07e9480e, 00e957051e, f3b60a1dab, cd0523c3f1, 7f318ee964, dc1656da8e, dc0bd9f25d, 52d8007b12,
4f8382b159, c2c82be556, 421a23ee3d, 191ea340ed, 8d22972d84, 8772b5af87, df2e7cde53, 02b2ae6142,
f813eb7ca4, d072edaa49, e3daa907c5, a39223822a, 1eb6cd38ce, eb6ad3ef9c, 3bef4f4413, 9f89881b0d,
f244439b4f, 30fd546c12, 126aefc207, e7a60555d6, ae912c4c3f, 13ea29e511, 5342df26fe, a930977460,
a3b8c2b71f, 39f808714d, 61bc95e8d6, 8adf6141e0, df3f282e4d, d81855015b, feb53104e5, 881c37393f,
9e98a25e45, fb73b83abe, 29b74424ad, b4cafec8b3, d43cd40807, 0301d8f239, 074744b8a6, 2d45124d9b,
40e7284d70, 4d8d34cc93, 5cced0af02, 9c60e9689f, 2052537681, a9bb64c55a, 9e31d6ceff, 139ec8c782,
2691999bd3, 48460678df, cb15e5c67e, 7380808b26
@@ -1,28 +1,26 @@
 ---
-name: New sprint issue
-about: ⚠️ Should only be used by the engine team ⚠️
+name: New feature issue
+about: ⚠️ Should only be used by the internal Meili team ⚠️
 title: ''
-labels: 'missing usage in PRD, impacts docs'
+labels: 'impacts docs, impacts integrations'
 assignees: ''

 ---

 Related product team resources: [PRD]() (_internal only_)
 Related product discussion:

 ## Motivation

 <!---Copy/paste the information in PRD or briefly detail the product motivation. Ask product team if any hesitation.-->

 ## Usage

 <!---Link to the public part of the PRD, or to the related product discussion for experimental features-->

 TBD

 ## TODO

 <!---If necessary, create a list with technical/product steps-->

 ### Are you modifying a database?

 - [ ] If not, add the `no db change` label to your PR, and you're good to merge.
 - [ ] If yes, add the `db change` label to your PR. You'll receive a message explaining you what to do.
@@ -54,5 +52,5 @@ Related product discussion:

 ## Impacted teams

-<!---Ping the related teams. Ask for the engine manager if any hesitation-->
-<!---@meilisearch/docs-team when there is any API change, e.g. settings addition-->
+<!---Ping the related teams. Ask on Slack if any hesitation-->
+<!---@meilisearch/docs-team and @meilisearch/integration-team when there is any API change, e.g. settings addition-->
16 .github/pull_request_template.md vendored Normal file
@@ -0,0 +1,16 @@
+## Related issue
+
+Fixes #...
+
+## Requirements
+
+⚠️ Ensure the following requirements before merging ⚠️
+- [ ] Automated tests have been added.
+- [ ] If some tests cannot be automated, manual rigorous tests should be applied.
+- [ ] ⚠️ If there is any change in the DB:
+  - [ ] Test that any impacted DB still works as expected after using `--experimental-dumpless-upgrade` on a DB created with the last released Meilisearch
+  - [ ] Test that during the upgrade, **search is still available** (artificially make the upgrade longer if needed)
+  - [ ] Set the `db change` label.
+- [ ] If necessary, the feature have been tested in the Cloud production environment (with [prototypes](./documentation/prototypes.md)) and the Cloud UI is ready.
+- [ ] If necessary, the [documentation](https://github.com/meilisearch/documentation) related to the implemented feature in the PR is ready.
+- [ ] If necessary, the [integrations](https://github.com/meilisearch/integration-guides) related to the implemented feature in the PR are ready.
33 .github/release-draft-template.yml vendored Normal file
@@ -0,0 +1,33 @@
+name-template: 'v$RESOLVED_VERSION'
+tag-template: 'v$RESOLVED_VERSION'
+exclude-labels:
+  - 'skip changelog'
+version-resolver:
+  minor:
+    labels:
+      - 'enhancement'
+  default: patch
+categories:
+  - title: '⚠️ Breaking changes'
+    label: 'breaking-change'
+  - title: '🚀 Enhancements'
+    label: 'enhancement'
+  - title: '🐛 Bug Fixes'
+    label: 'bug'
+  - title: '🔒 Security'
+    label: 'security'
+  - title: '⚙️ Maintenance/misc'
+    label:
+      - 'maintenance'
+      - 'documentation'
+template: |
+  $CHANGES
+
+  ❤️ Huge thanks to our contributors: $CONTRIBUTORS.
+no-changes-template: 'Changes are coming soon 😎'
+sort-direction: 'ascending'
+replacers:
+  - search: '/(?:and )?@dependabot-preview(?:\[bot\])?,?/g'
+    replace: ''
+  - search: '/(?:and )?@dependabot(?:\[bot\])?,?/g'
+    replace: ''
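For reference, a draft generated from this template would look roughly like the sample below; the version, entry, and contributor handle are invented placeholders, not values from this diff:

```
v1.17.1

## 🐛 Bug Fixes

* Fix a crash in the filter parser (#0000) @contributor

❤️ Huge thanks to our contributors: @contributor.
```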
22 .github/templates/dependency-issue.md vendored Normal file
@@ -0,0 +1,22 @@
+This issue is about updating Meilisearch dependencies:
+- [ ] Update Meilisearch dependencies with the help of `cargo +nightly udeps --all-targets` (remove unused dependencies) and `cargo upgrade` (upgrade dependencies versions) - ⚠️ Some repositories may contain subdirectories (like heed, charabia, or deserr). Take care of updating these in the main crate as well. This won't be done automatically by `cargo upgrade`.
+  - [ ] [deserr](https://github.com/meilisearch/deserr)
+  - [ ] [charabia](https://github.com/meilisearch/charabia/)
+  - [ ] [heed](https://github.com/meilisearch/heed/)
+  - [ ] [roaring-rs](https://github.com/RoaringBitmap/roaring-rs/)
+  - [ ] [obkv](https://github.com/meilisearch/obkv)
+  - [ ] [grenad](https://github.com/meilisearch/grenad/)
+  - [ ] [arroy](https://github.com/meilisearch/arroy/)
+  - [ ] [segment](https://github.com/meilisearch/segment)
+  - [ ] [bumparaw-collections](https://github.com/meilisearch/bumparaw-collections)
+  - [ ] [bbqueue](https://github.com/meilisearch/bbqueue)
+  - [ ] Finally, [Meilisearch](https://github.com/meilisearch/MeiliSearch)
+- [ ] If new Rust versions have been released, update the minimal Rust version in use at Meilisearch:
+  - [ ] in this [GitHub Action file](https://github.com/meilisearch/meilisearch/blob/main/.github/workflows/test-suite.yml), by changing the `toolchain` field of the `rustfmt` job to the latest available nightly (of the day before or the current day).
+  - [ ] in every [GitHub Action files](https://github.com/meilisearch/meilisearch/blob/main/.github/workflows), by changing all the `dtolnay/rust-toolchain@` references to use the latest stable version.
+  - [ ] in this [`rust-toolchain.toml`](https://github.com/meilisearch/meilisearch/blob/main/rust-toolchain.toml), by changing the `channel` field to the latest stable version.
+  - [ ] in the [Dockerfile](https://github.com/meilisearch/meilisearch/blob/main/Dockerfile), by changing the base image to `rust:<target_rust_version>-alpine<alpine_version>`. Check that the image exists on [Dockerhub](https://hub.docker.com/_/rust/tags?page=1&name=alpine). Also, build and run the image to check everything still works!
+
+⚠️ This issue should be prioritized to avoid any deprecation and vulnerability issues.
+
+The GitHub action dependencies are managed by [Dependabot](https://github.com/meilisearch/meilisearch/blob/main/.github/dependabot.yml), so no need to update them when solving this issue.
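The routine this template describes boils down to a handful of cargo invocations. A minimal sketch, assuming the `cargo-udeps` and `cargo-edit` subcommands are installed:

```bash
# Sketch of the dependency-update pass from the template above
# (assumes the cargo-udeps and cargo-edit subcommands are installed).
cargo install cargo-udeps cargo-edit

cargo +nightly udeps --all-targets   # list dependencies that are no longer used
cargo upgrade                        # bump dependency versions in Cargo.toml
cargo update                         # refresh Cargo.lock accordingly
cargo test                           # check that everything still builds and passes
```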
100 .github/workflows/check-valid-milestone.yml vendored
@@ -1,100 +0,0 @@
-name: PR Milestone Check
-
-on:
-  pull_request:
-    types: [opened, reopened, edited, synchronize, milestoned, demilestoned]
-    branches:
-      - "main"
-      - "release-v*.*.*"
-
-jobs:
-  check-milestone:
-    name: Check PR Milestone
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-
-      - name: Validate PR milestone
-        uses: actions/github-script@v7
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          script: |
-            // Get PR number directly from the event payload
-            const prNumber = context.payload.pull_request.number;
-
-            // Get PR details
-            const { data: prData } = await github.rest.pulls.get({
-              owner: 'meilisearch',
-              repo: 'meilisearch',
-              pull_number: prNumber
-            });
-
-            // Get base branch name
-            const baseBranch = prData.base.ref;
-            console.log(`Base branch: ${baseBranch}`);
-
-            // Get PR milestone
-            const prMilestone = prData.milestone;
-            if (!prMilestone) {
-              core.setFailed('PR must have a milestone assigned');
-              return;
-            }
-            console.log(`PR milestone: ${prMilestone.title}`);
-
-            // Validate milestone format: vx.y.z
-            const milestoneRegex = /^v\d+\.\d+\.\d+$/;
-            if (!milestoneRegex.test(prMilestone.title)) {
-              core.setFailed(`Milestone "${prMilestone.title}" does not follow the required format vx.y.z`);
-              return;
-            }
-
-            // For main branch PRs, check if the milestone is the highest one
-            if (baseBranch === 'main') {
-              // Get all milestones
-              const { data: milestones } = await github.rest.issues.listMilestones({
-                owner: 'meilisearch',
-                repo: 'meilisearch',
-                state: 'open',
-                sort: 'due_on',
-                direction: 'desc'
-              });
-
-              // Sort milestones by version number (vx.y.z)
-              const sortedMilestones = milestones
-                .filter(m => milestoneRegex.test(m.title))
-                .sort((a, b) => {
-                  const versionA = a.title.substring(1).split('.').map(Number);
-                  const versionB = b.title.substring(1).split('.').map(Number);
-
-                  // Compare major version
-                  if (versionA[0] !== versionB[0]) return versionB[0] - versionA[0];
-                  // Compare minor version
-                  if (versionA[1] !== versionB[1]) return versionB[1] - versionA[1];
-                  // Compare patch version
-                  return versionB[2] - versionA[2];
-                });
-
-              if (sortedMilestones.length === 0) {
-                core.setFailed('No valid milestones found in the repository. Please create at least one milestone with the format vx.y.z');
-                return;
-              }
-
-              const highestMilestone = sortedMilestones[0];
-              console.log(`Highest milestone: ${highestMilestone.title}`);
-
-              if (prMilestone.title !== highestMilestone.title) {
-                core.setFailed(`PRs targeting the main branch must use the highest milestone (${highestMilestone.title}), but this PR uses ${prMilestone.title}`);
-                return;
-              }
-            } else {
-              // For release branches, the milestone should match the branch version
-              const branchVersion = baseBranch.substring(8); // remove 'release-'
-              if (prMilestone.title !== branchVersion) {
-                core.setFailed(`PRs targeting release branch "${baseBranch}" must use the matching milestone "${branchVersion}", but this PR uses "${prMilestone.title}"`);
-                return;
-              }
-            }
-
-            console.log('PR milestone validation passed!');
2 .github/workflows/dependency-issue.yml vendored
@@ -15,7 +15,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
      - name: Download the issue template
-       run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/dependency-issue.md > $ISSUE_TEMPLATE
+       run: curl -s https://raw.githubusercontent.com/meilisearch/meilisearch/main/.github/templates/dependency-issue.md > $ISSUE_TEMPLATE
      - name: Create issue
        run: |
          gh issue create \
2 .github/workflows/flaky-tests.yml vendored
@@ -3,7 +3,7 @@ name: Look for flaky tests
 on:
   workflow_dispatch:
   schedule:
-    - cron: "0 12 * * FRI" # Every Friday at 12:00PM
+    - cron: '0 4 * * *' # Every day at 4:00AM

 jobs:
   flaky:
224 .github/workflows/milestone-workflow.yml vendored
@@ -1,224 +0,0 @@
-name: Milestone's workflow
-
-# /!\ No git flow are handled here
-
-# For each Milestone created (not opened!), and if the release is NOT a patch release (only the patch changed)
-# - the roadmap issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/roadmap-issue.md
-# - the changelog issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/changelog-issue.md
-# - update the ruleset to add the current release version to the list of allowed versions and be able to use the merge queue.
-
-# For each Milestone closed
-# - the `release_version` label is created
-# - this label is applied to all issues/PRs in the Milestone
-
-on:
-  milestone:
-    types: [created, closed]
-
-env:
-  MILESTONE_VERSION: ${{ github.event.milestone.title }}
-  MILESTONE_URL: ${{ github.event.milestone.html_url }}
-  MILESTONE_DUE_ON: ${{ github.event.milestone.due_on }}
-  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
-
-jobs:
-  # -----------------
-  # MILESTONE CREATED
-  # -----------------
-
-  get-release-version:
-    if: github.event.action == 'created'
-    runs-on: ubuntu-latest
-    outputs:
-      is-patch: ${{ steps.check-patch.outputs.is-patch }}
-    steps:
-      - uses: actions/checkout@v3
-      - name: Check if this release is a patch release only
-        id: check-patch
-        run: |
-          echo version: $MILESTONE_VERSION
-          if [[ $MILESTONE_VERSION =~ ^v[0-9]+\.[0-9]+\.0$ ]]; then
-            echo 'This is NOT a patch release'
-            echo "is-patch=false" >> $GITHUB_OUTPUT
-          elif [[ $MILESTONE_VERSION =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
-            echo 'This is a patch release'
-            echo "is-patch=true" >> $GITHUB_OUTPUT
-          else
-            echo "Not a valid format of release, check the Milestone's title."
-            echo 'Should be vX.Y.Z'
-            exit 1
-          fi
-
-  create-roadmap-issue:
-    needs: get-release-version
-    # Create the roadmap issue if the release is not only a patch release
-    if: github.event.action == 'created' && needs.get-release-version.outputs.is-patch == 'false'
-    runs-on: ubuntu-latest
-    env:
-      ISSUE_TEMPLATE: issue-template.md
-    steps:
-      - uses: actions/checkout@v3
-      - name: Download the issue template
-        run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/roadmap-issue.md > $ISSUE_TEMPLATE
-      - name: Replace all empty occurrences in the templates
-        run: |
-          # Replace all <<version>> occurrences
-          sed -i "s/<<version>>/$MILESTONE_VERSION/g" $ISSUE_TEMPLATE
-
-          # Replace all <<milestone_id>> occurrences
-          milestone_id=$(echo $MILESTONE_URL | cut -d '/' -f 7)
-          sed -i "s/<<milestone_id>>/$milestone_id/g" $ISSUE_TEMPLATE
-
-          # Replace release date if exists
-          if [[ ! -z $MILESTONE_DUE_ON ]]; then
-            date=$(echo $MILESTONE_DUE_ON | cut -d 'T' -f 1)
-            sed -i "s/Release date\: 20XX-XX-XX/Release date\: $date/g" $ISSUE_TEMPLATE
-          fi
-      - name: Create the issue
-        run: |
-          gh issue create \
-            --title "$MILESTONE_VERSION ROADMAP" \
-            --label 'epic,impacts docs,impacts integrations,impacts cloud' \
-            --body-file $ISSUE_TEMPLATE \
-            --milestone $MILESTONE_VERSION
-
-  create-changelog-issue:
-    needs: get-release-version
-    # Create the changelog issue if the release is not only a patch release
-    if: github.event.action == 'created' && needs.get-release-version.outputs.is-patch == 'false'
-    runs-on: ubuntu-latest
-    env:
-      ISSUE_TEMPLATE: issue-template.md
-    steps:
-      - uses: actions/checkout@v3
-      - name: Download the issue template
-        run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/changelog-issue.md > $ISSUE_TEMPLATE
-      - name: Replace all empty occurrences in the templates
-        run: |
-          # Replace all <<version>> occurrences
-          sed -i "s/<<version>>/$MILESTONE_VERSION/g" $ISSUE_TEMPLATE
-
-          # Replace all <<milestone_id>> occurrences
-          milestone_id=$(echo $MILESTONE_URL | cut -d '/' -f 7)
-          sed -i "s/<<milestone_id>>/$milestone_id/g" $ISSUE_TEMPLATE
-      - name: Create the issue
-        run: |
-          gh issue create \
-            --title "Create release changelogs for $MILESTONE_VERSION" \
-            --label 'impacts docs,documentation' \
-            --body-file $ISSUE_TEMPLATE \
-            --milestone $MILESTONE_VERSION \
-            --assignee curquiza
-
-  create-update-version-issue:
-    needs: get-release-version
-    # Create the update-version issue even if the release is a patch release
-    if: github.event.action == 'created'
-    runs-on: ubuntu-latest
-    env:
-      ISSUE_TEMPLATE: issue-template.md
-    steps:
-      - uses: actions/checkout@v3
-      - name: Download the issue template
-        run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/update-version-issue.md > $ISSUE_TEMPLATE
-      - name: Create the issue
-        run: |
-          gh issue create \
-            --title "Update version in Cargo.toml for $MILESTONE_VERSION" \
-            --label 'maintenance' \
-            --body-file $ISSUE_TEMPLATE \
-            --milestone $MILESTONE_VERSION
-
-  create-update-openapi-issue:
-    needs: get-release-version
-    # Create the openAPI issue if the release is not only a patch release
-    if: github.event.action == 'created' && needs.get-release-version.outputs.is-patch == 'false'
-    runs-on: ubuntu-latest
-    env:
-      ISSUE_TEMPLATE: issue-template.md
-    steps:
-      - uses: actions/checkout@v3
-      - name: Download the issue template
-        run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/update-openapi-issue.md > $ISSUE_TEMPLATE
-      - name: Create the issue
-        run: |
-          gh issue create \
-            --title "Update Open API file for $MILESTONE_VERSION" \
-            --label 'maintenance' \
-            --body-file $ISSUE_TEMPLATE \
-            --milestone $MILESTONE_VERSION
-
-  update-ruleset:
-    runs-on: ubuntu-latest
-    if: github.event.action == 'created'
-    steps:
-      - uses: actions/checkout@v3
-      - name: Install jq
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y jq
-      - name: Update ruleset
-        env:
-          # gh api repos/meilisearch/meilisearch/rulesets --jq '.[] | {name: .name, id: .id}'
-          RULESET_ID: 4253297
-          BRANCH_NAME: ${{ github.event.inputs.branch_name }}
-        run: |
-          echo "RULESET_ID: ${{ env.RULESET_ID }}"
-          echo "BRANCH_NAME: ${{ env.BRANCH_NAME }}"
-
-          # Get current ruleset conditions
-          CONDITIONS=$(gh api repos/meilisearch/meilisearch/rulesets/${{ env.RULESET_ID }} --jq '{ conditions: .conditions }')
-
-          # Update the conditions by appending the milestone version
-          UPDATED_CONDITIONS=$(echo $CONDITIONS | jq '.conditions.ref_name.include += ["refs/heads/release-'${{ env.MILESTONE_VERSION }}'"]')
-
-          # Update the ruleset from stdin (-)
-          echo $UPDATED_CONDITIONS |
-            gh api repos/meilisearch/meilisearch/rulesets/${{ env.RULESET_ID }} \
-              --method PUT \
-              -H "Accept: application/vnd.github+json" \
-              -H "X-GitHub-Api-Version: 2022-11-28" \
-              --input -
-
-  # ----------------
-  # MILESTONE CLOSED
-  # ----------------
-
-  create-release-label:
-    if: github.event.action == 'closed'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Create the ${{ env.MILESTONE_VERSION }} label
-        run: |
-          label_description="PRs/issues solved in $MILESTONE_VERSION"
-          if [[ ! -z $MILESTONE_DUE_ON ]]; then
-            date=$(echo $MILESTONE_DUE_ON | cut -d 'T' -f 1)
-            label_description="$label_description released on $date"
-          fi
-
-          gh api repos/meilisearch/meilisearch/labels \
-            --method POST \
-            -H "Accept: application/vnd.github+json" \
-            -f name="$MILESTONE_VERSION" \
-            -f description="$label_description" \
-            -f color='ff5ba3'
-
-  labelize-all-milestone-content:
-    if: github.event.action == 'closed'
-    needs: create-release-label
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Add label ${{ env.MILESTONE_VERSION }} to all PRs in the Milestone
-        run: |
-          prs=$(gh pr list --search milestone:"$MILESTONE_VERSION" --limit 1000 --state all --json number --template '{{range .}}{{tablerow (printf "%v" .number)}}{{end}}')
-          for pr in $prs; do
-            gh pr edit $pr --add-label $MILESTONE_VERSION
-          done
-      - name: Add label ${{ env.MILESTONE_VERSION }} to all issues in the Milestone
-        run: |
-          issues=$(gh issue list --search milestone:"$MILESTONE_VERSION" --limit 1000 --state all --json number --template '{{range .}}{{tablerow (printf "%v" .number)}}{{end}}')
-          for issue in $issues; do
-            gh issue edit $issue --add-label $MILESTONE_VERSION
-          done
2 .github/workflows/publish-apt-brew-pkg.yml vendored
@@ -32,7 +32,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.11.2
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
17 .github/workflows/publish-docker-images.yml vendored
@@ -16,6 +16,8 @@ on:
 jobs:
   docker:
     runs-on: docker
+    permissions:
+      id-token: write # This is needed to use Cosign in keyless mode
     steps:
       - uses: actions/checkout@v3
@@ -62,6 +64,9 @@ jobs:
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

+      - name: Install cosign
+        uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # tag=v3.9.2
+
       - name: Login to Docker Hub
         uses: docker/login-action@v3
         with:
@@ -85,6 +90,7 @@ jobs:
       - name: Build and push
         uses: docker/build-push-action@v6
+        id: build-and-push
         with:
           push: true
           platforms: linux/amd64,linux/arm64
@@ -94,6 +100,17 @@ jobs:
            COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
            GIT_TAG=${{ github.ref_name }}

+      - name: Sign the images with GitHub OIDC Token
+        env:
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.meta.outputs.tags }}
+        run: |
+          images=""
+          for tag in ${TAGS}; do
+            images+="${tag}@${DIGEST} "
+          done
+          cosign sign --yes ${images}
+
       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
         # Do not send if nightly build (i.e. 'schedule' or 'workflow_dispatch' event)
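Images signed this way can later be checked against the workflow's OIDC identity with `cosign verify`; a minimal sketch, where the identity regexp and image tag are assumptions rather than values from this diff:

```bash
# Verify the keyless Cosign signature produced by the signing step above.
# The certificate identity and image tag below are illustrative assumptions.
cosign verify \
  --certificate-oidc-issuer https://token.actions.githubusercontent.com \
  --certificate-identity-regexp 'https://github.com/meilisearch/meilisearch/' \
  getmeili/meilisearch:latest
```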
@@ -1,4 +1,4 @@
-name: Publish binaries to GitHub release
+name: Publish assets to GitHub release

 on:
   workflow_dispatch:
@@ -51,7 +51,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.11.2
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -81,7 +81,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.11.2
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -113,7 +113,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.11.2
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -178,9 +178,34 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.11.2
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
           asset_name: ${{ matrix.asset_name }}
           tag: ${{ github.ref }}

+  publish-openapi-file:
+    name: Publish OpenAPI file
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Setup Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: stable
+          override: true
+      - name: Generate OpenAPI file
+        run: |
+          cd crates/openapi-generator
+          cargo run --release -- --pretty --output ../../meilisearch.json
+      - name: Upload OpenAPI to Release
+        # No need to upload for dry run (cron)
+        if: github.event_name == 'release'
+        uses: svenstaro/upload-release-action@2.11.2
+        with:
+          repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
+          file: ./meilisearch.json
+          asset_name: meilisearch-openapi.json
+          tag: ${{ github.ref }}
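Once this job has run on a release, the OpenAPI file should be downloadable as a release asset; assuming GitHub's usual `releases/latest/download` URL convention:

```bash
# Download the OpenAPI asset uploaded by the publish-openapi-file job above.
# The releases/latest/download URL pattern is a GitHub convention, assumed here.
curl -L -o meilisearch-openapi.json \
  https://github.com/meilisearch/meilisearch/releases/latest/download/meilisearch-openapi.json
```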
20 .github/workflows/release-drafter.yml vendored Normal file
@@ -0,0 +1,20 @@
+name: Release Drafter
+
+permissions:
+  contents: read
+  pull-requests: write
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  update_release_draft:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: release-drafter/release-drafter@v6
+        with:
+          config-name: release-draft-template.yml
+        env:
+          GITHUB_TOKEN: ${{ secrets.RELEASE_DRAFTER_TOKEN }}
14 .github/workflows/sdks-tests.yml vendored
@@ -9,7 +9,7 @@ on:
         required: false
         default: nightly
   schedule:
-    - cron: "0 6 * * MON" # Every Monday at 6:00AM
+    - cron: '0 6 * * *' # Every day at 6:00am

 env:
   MEILI_MASTER_KEY: 'masterKey'
@@ -114,7 +114,7 @@ jobs:
           dep ensure
         fi
       - name: Run integration tests
-        run: go test -v ./...
+        run: go test --race -v ./integration

   meilisearch-java-tests:
     needs: define-docker-image
@@ -344,15 +344,23 @@ jobs:
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
         ports:
           - '7700:7700'
+    env:
+      RAILS_VERSION: '7.0'
     steps:
       - uses: actions/checkout@v3
         with:
           repository: meilisearch/meilisearch-rails
-      - name: Set up Ruby 3
+      - name: Install SQLite dependencies
+        run: sudo apt-get update && sudo apt-get install -y libsqlite3-dev
+      - name: Set up Ruby
         uses: ruby/setup-ruby@v1
         with:
           ruby-version: 3
           bundler-cache: true
+      - name: Start MongoDB
+        uses: supercharge/mongodb-github-action@1.12.0
+        with:
+          mongodb-version: 8.0
       - name: Run tests
         run: bundle exec rspec
2 .github/workflows/test-suite.yml vendored
@@ -3,7 +3,7 @@ name: Test suite
 on:
   workflow_dispatch:
   schedule:
-    # Everyday at 5:00am
+    # Every day at 5:00am
     - cron: "0 5 * * *"
   pull_request:
   merge_group:
@@ -41,5 +41,4 @@
           --title "Update version for the next release ($NEW_VERSION) in Cargo.toml" \
           --body '⚠️ This PR is automatically generated. Check the new version is the expected one and Cargo.lock has been updated before merging.' \
           --label 'skip changelog' \
-          --milestone $NEW_VERSION \
           --base $GITHUB_REF_NAME
@@ -106,7 +106,19 @@ Run `cargo xtask --help` from the root of the repository to find out what is ava
 #### Update the openAPI file if the API changed

 To update the openAPI file in the code, see [sprint_issue.md](https://github.com/meilisearch/meilisearch/blob/main/.github/ISSUE_TEMPLATE/sprint_issue.md#reminders-when-modifying-the-api).
 If you want to update the openAPI file on the [open-api repository](https://github.com/meilisearch/open-api), see [update-openapi-issue.md](https://github.com/meilisearch/engine-team/blob/main/issue-templates/update-openapi-issue.md).
+
+If you want to generate OpenAPI file manually:
+
+With swagger:
+- Starts Meilisearch with the `swagger` feature flag: `cargo run --features swagger`
+- On a browser, open the following URL: http://localhost:7700/scalar
+- Click the « Download openAPI file »
+
+With the internal crate:
+```bash
+cd crates/openapi-generator
+cargo run --release -- --pretty --output meilisearch.json
+```

 ### Logging

@@ -160,25 +172,37 @@ Some notes on GitHub PRs:
   The draft PRs are recommended when you want to show that you are working on something and make your work visible.
 - The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [GitHub Merge Queues](https://github.blog/news-insights/product-news/github-merge-queue-is-generally-available/) to automatically enforce this requirement without the PR author having to rebase manually.

-## Release Process (for internal team only)
-
-Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).
-
-### Automation to rebase and Merge the PRs
+## Merging PRs

 This project uses GitHub Merge Queues that helps us manage pull requests merging.

-### How to Publish a new Release
+Before merging a PR, the maintainer should ensure the following requirements are met
+- Automated tests have been added.
+- If some tests cannot be automated, manual rigorous tests should be applied.
+- ⚠️ If there is an change in the DB: it's mandatory to manually test the `--experimental-dumpless-upgrade` on a DB of the previous Meilisearch minor version (e.g. v1.13 for the v1.14 release).
+- If necessary, the feature have been tested in the Cloud production environment (with [prototypes](./documentation/prototypes.md)) and the Cloud UI is ready.
+- If necessary, the [documentation](https://github.com/meilisearch/documentation) related to the implemented feature in the PR is ready.
+- If necessary, the [integrations](https://github.com/meilisearch/integration-guides) related to the implemented feature in the PR are ready.

-The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/engine-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
+## Publish Process (for internal team only)
+
+Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).
+
+### How to publish a new release
+
+The full Meilisearch release process is described in [this guide](./documentation/release.md).

 ### How to publish a prototype

 Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier to test by the users.

 This happens in two steps:
-- [Release the prototype](https://github.com/meilisearch/engine-team/blob/main/resources/prototypes.md#how-to-publish-a-prototype)
-- [Communicate about it](https://github.com/meilisearch/engine-team/blob/main/resources/prototypes.md#communication)
+- [Release the prototype](./documentation/prototypes.md#how-to-publish-a-prototype)
+- [Communicate about it](./documentation/prototypes.md#communication)

 ### How to implement and publish an experimental feature

 Here is our [guidelines and process](./documentation/experimental-features.md) to implement and publish an experimental feature.

 ### Release assets
74 Cargo.lock generated
@@ -580,7 +580,7 @@ source = "git+https://github.com/meilisearch/bbqueue#cbb87cc707b5af415ef203bdaf2
 [[package]]
 name = "benchmarks"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "anyhow",
  "bumpalo",
@@ -736,6 +736,12 @@ dependencies = [
  "syn 2.0.101",
 ]

+[[package]]
+name = "boxcar"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36f64beae40a84da1b4b26ff2761a5b895c12adc41dc25aaee1c4f2bbfe97a6e"
+
 [[package]]
 name = "brotli"
 version = "8.0.1"
@@ -770,7 +776,7 @@ dependencies = [

 [[package]]
 name = "build-info"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "anyhow",
  "time",
@@ -1774,7 +1780,7 @@ dependencies = [

 [[package]]
 name = "dump"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "anyhow",
  "big_s",
@@ -2006,7 +2012,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"

 [[package]]
 name = "file-store"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "tempfile",
  "thiserror 2.0.12",
@@ -2028,9 +2034,10 @@ dependencies = [

 [[package]]
 name = "filter-parser"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "insta",
+ "levenshtein_automata",
  "nom",
  "nom_locate",
  "unescaper",
@@ -2049,7 +2056,7 @@ dependencies = [

 [[package]]
 name = "flatten-serde-json"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "criterion",
  "serde_json",
@@ -2194,7 +2201,7 @@ dependencies = [

 [[package]]
 name = "fuzzers"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "arbitrary",
  "bumpalo",
@@ -2994,7 +3001,7 @@ dependencies = [

 [[package]]
 name = "index-scheduler"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "anyhow",
  "backoff",
@@ -3230,7 +3237,7 @@ dependencies = [

 [[package]]
 name = "json-depth-checker"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "criterion",
  "serde_json",
@@ -3724,7 +3731,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"

 [[package]]
 name = "meili-snap"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "insta",
  "md5",
@@ -3735,7 +3742,7 @@ dependencies = [

 [[package]]
 name = "meilisearch"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -3831,7 +3838,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-auth"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "base64 0.22.1",
  "enum-iterator",
@@ -3850,7 +3857,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-types"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -3885,7 +3892,7 @@ dependencies = [

 [[package]]
 name = "meilitool"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "anyhow",
  "clap",
@@ -3919,7 +3926,7 @@ dependencies = [

 [[package]]
 name = "milli"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "allocator-api2 0.3.0",
  "arroy",
@@ -3927,6 +3934,7 @@ dependencies = [
  "big_s",
  "bimap",
  "bincode",
+ "boxcar",
  "bstr",
  "bumpalo",
  "bumparaw-collections",
@@ -3969,6 +3977,7 @@ dependencies = [
  "obkv",
  "once_cell",
  "ordered-float 5.0.0",
+ "papaya",
  "rand 0.8.5",
  "rayon",
  "rhai",
@@ -4338,6 +4347,17 @@ version = "11.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"

+[[package]]
+name = "openapi-generator"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "meilisearch",
+ "serde_json",
+ "utoipa",
+]
+
 [[package]]
 name = "openssl-probe"
 version = "0.1.6"
@@ -4399,6 +4419,16 @@ dependencies = [
  "winapi",
 ]

+[[package]]
+name = "papaya"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f92dd0b07c53a0a0c764db2ace8c541dc47320dad97c2200c2a637ab9dd2328f"
+dependencies = [
+ "equivalent",
+ "seize",
+]
+
 [[package]]
 name = "parking_lot"
 version = "0.12.4"
@@ -4471,7 +4501,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"

 [[package]]
 name = "permissive-json-pointer"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "big_s",
  "serde_json",
@@ -5450,6 +5480,16 @@ dependencies = [
  "time",
 ]

+[[package]]
+name = "seize"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4b8d813387d566f627f3ea1b914c068aac94c40ae27ec43f5f33bde65abefe7"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
 [[package]]
 name = "semver"
 version = "1.0.26"
@@ -7259,7 +7299,7 @@ dependencies = [

 [[package]]
 name = "xtask"
-version = "1.16.0"
+version = "1.17.1"
 dependencies = [
  "anyhow",
  "build-info",
@@ -19,10 +19,11 @@ members = [
     "crates/tracing-trace",
     "crates/xtask",
     "crates/build-info",
+    "crates/openapi-generator",
 ]

 [workspace.package]
-version = "1.16.0"
+version = "1.17.1"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
     "Clément Renault <clement@meilisearch.com>",
@@ -119,6 +119,6 @@ Meilisearch is, and will always be, open-source! If you want to contribute to th
 Meilisearch releases and their associated binaries are available on the project's [releases page](https://github.com/meilisearch/meilisearch/releases).

-The binaries are versioned following [SemVer conventions](https://semver.org/). To know more, read our [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md).
+The binaries are versioned following [SemVer conventions](https://semver.org/). To know more, read our [versioning policy](./documentation/versioning-policy.md).

 Differently from the binaries, crates in this repository are not currently available on [crates.io](https://crates.io/) and do not follow [SemVer conventions](https://semver.org).

@@ -55,3 +55,7 @@ harness = false
 [[bench]]
 name = "sort"
 harness = false
+
+[[bench]]
+name = "filter_starts_with"
+harness = false
66 crates/benchmarks/benches/filter_starts_with.rs Normal file
@@ -0,0 +1,66 @@
+mod datasets_paths;
+mod utils;
+
+use criterion::{criterion_group, criterion_main};
+use milli::update::Settings;
+use milli::FilterableAttributesRule;
+use utils::Conf;
+
+#[cfg(not(windows))]
+#[global_allocator]
+static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
+
+fn base_conf(builder: &mut Settings) {
+    let displayed_fields = ["geonameid", "name"].iter().map(|s| s.to_string()).collect();
+    builder.set_displayed_fields(displayed_fields);
+
+    let filterable_fields =
+        ["name"].iter().map(|s| FilterableAttributesRule::Field(s.to_string())).collect();
+    builder.set_filterable_fields(filterable_fields);
+}
+
+#[rustfmt::skip]
+const BASE_CONF: Conf = Conf {
+    dataset: datasets_paths::SMOL_ALL_COUNTRIES,
+    dataset_format: "jsonl",
+    queries: &[
+        "",
+    ],
+    configure: base_conf,
+    primary_key: Some("geonameid"),
+    ..Conf::BASE
+};
+
+fn filter_starts_with(c: &mut criterion::Criterion) {
+    #[rustfmt::skip]
+    let confs = &[
+        utils::Conf {
+            group_name: "1 letter",
+            filter: Some("name STARTS WITH e"),
+            ..BASE_CONF
+        },
+
+        utils::Conf {
+            group_name: "2 letters",
+            filter: Some("name STARTS WITH es"),
+            ..BASE_CONF
+        },
+
+        utils::Conf {
+            group_name: "3 letters",
+            filter: Some("name STARTS WITH est"),
+            ..BASE_CONF
+        },
+
+        utils::Conf {
+            group_name: "6 letters",
+            filter: Some("name STARTS WITH estoni"),
+            ..BASE_CONF
+        }
+    ];
+
+    utils::run_benches(c, confs);
+}
+
+criterion_group!(benches, filter_starts_with);
+criterion_main!(benches);
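Assuming the Criterion harness shared by the other benches in this crate, the new benchmark can be run on its own; `-p benchmarks` matches the crate name used elsewhere in this diff:

```bash
# Run only the new STARTS WITH filter benchmark; the bench name comes from the
# [[bench]] entry added to the benchmarks crate's Cargo.toml above.
cargo bench -p benchmarks --bench filter_starts_with
```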
@@ -202,6 +202,10 @@ impl CompatV5ToV6 {
     pub fn network(&self) -> Result<Option<&v6::Network>> {
         Ok(None)
     }
+
+    pub fn webhooks(&self) -> Option<&v6::Webhooks> {
+        None
+    }
 }

 pub enum CompatIndexV5ToV6 {
@@ -138,6 +138,13 @@ impl DumpReader {
             DumpReader::Compat(compat) => compat.network(),
         }
     }
+
+    pub fn webhooks(&self) -> Option<&v6::Webhooks> {
+        match self {
+            DumpReader::Current(current) => current.webhooks(),
+            DumpReader::Compat(compat) => compat.webhooks(),
+        }
+    }
 }

 impl From<V6Reader> for DumpReader {
@@ -365,6 +372,7 @@ pub(crate) mod test {

         assert_eq!(dump.features().unwrap().unwrap(), RuntimeTogglableFeatures::default());
         assert_eq!(dump.network().unwrap(), None);
+        assert_eq!(dump.webhooks(), None);
     }

     #[test]
@@ -435,6 +443,43 @@ pub(crate) mod test {
         insta::assert_snapshot!(network.remotes.get("ms-2").as_ref().unwrap().search_api_key.as_ref().unwrap(), @"foo");
     }

+    #[test]
+    fn import_dump_v6_webhooks() {
+        let dump = File::open("tests/assets/v6-with-webhooks.dump").unwrap();
+        let dump = DumpReader::open(dump).unwrap();
+
+        // top level infos
+        insta::assert_snapshot!(dump.date().unwrap(), @"2025-07-31 9:21:30.479544 +00:00:00");
+        insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @r"
+        Some(
+            cb887dcc-34b3-48d1-addd-9815ae721a81,
+        )
+        ");
+
+        // webhooks
+        let webhooks = dump.webhooks().unwrap();
+        insta::assert_json_snapshot!(webhooks, @r#"
+        {
+          "webhooks": {
+            "627ea538-733d-4545-8d2d-03526eb381ce": {
+              "url": "https://example.com/authorization-less",
+              "headers": {}
+            },
+            "771b0a28-ef28-4082-b984-536f82958c65": {
+              "url": "https://example.com/hook",
+              "headers": {
+                "authorization": "TOKEN"
+              }
+            },
+            "f3583083-f8a7-4cbf-a5e7-fb3f1e28a7e9": {
+              "url": "https://third.com",
+              "headers": {}
+            }
+          }
+        }
+        "#);
+    }
+
     #[test]
     fn import_dump_v5() {
         let dump = File::open("tests/assets/v5.dump").unwrap();
@@ -25,6 +25,7 @@ pub type Key = meilisearch_types::keys::Key;
 pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
 pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
 pub type Network = meilisearch_types::features::Network;
+pub type Webhooks = meilisearch_types::webhooks::WebhooksDumpView;

 // ===== Other types to clarify the code of the compat module
 // everything related to the tasks
@@ -59,6 +60,7 @@ pub struct V6Reader {
     keys: BufReader<File>,
     features: Option<RuntimeTogglableFeatures>,
     network: Option<Network>,
+    webhooks: Option<Webhooks>,
 }

 impl V6Reader {
@@ -93,8 +95,8 @@ impl V6Reader {
             Err(e) => return Err(e.into()),
         };

-        let network_file = match fs::read(dump.path().join("network.json")) {
-            Ok(network_file) => Some(network_file),
+        let network = match fs::read(dump.path().join("network.json")) {
+            Ok(network_file) => Some(serde_json::from_reader(&*network_file)?),
             Err(error) => match error.kind() {
                 // Allows the file to be missing, this will only result in all experimental features disabled.
                 ErrorKind::NotFound => {
@@ -104,10 +106,16 @@ impl V6Reader {
                 _ => return Err(error.into()),
             },
         };
-        let network = if let Some(network_file) = network_file {
-            Some(serde_json::from_reader(&*network_file)?)
-        } else {
-            None

+        let webhooks = match fs::read(dump.path().join("webhooks.json")) {
+            Ok(webhooks_file) => Some(serde_json::from_reader(&*webhooks_file)?),
+            Err(error) => match error.kind() {
+                ErrorKind::NotFound => {
+                    debug!("`webhooks.json` not found in dump");
+                    None
+                }
+                _ => return Err(error.into()),
+            },
+        };

         Ok(V6Reader {
@@ -119,6 +127,7 @@ impl V6Reader {
             features,
             network,
             dump,
+            webhooks,
         })
     }

@@ -229,6 +238,10 @@ impl V6Reader {
     pub fn network(&self) -> Option<&Network> {
         self.network.as_ref()
     }
+
+    pub fn webhooks(&self) -> Option<&Webhooks> {
+        self.webhooks.as_ref()
+    }
 }

 pub struct UpdateFile {
@@ -8,6 +8,7 @@ use meilisearch_types::batches::Batch;
 use meilisearch_types::features::{ChatCompletionSettings, Network, RuntimeTogglableFeatures};
 use meilisearch_types::keys::Key;
 use meilisearch_types::settings::{Checked, Settings};
+use meilisearch_types::webhooks::WebhooksDumpView;
 use serde_json::{Map, Value};
 use tempfile::TempDir;
 use time::OffsetDateTime;
@@ -74,6 +75,13 @@ impl DumpWriter {
         Ok(std::fs::write(self.dir.path().join("network.json"), serde_json::to_string(&network)?)?)
     }

+    pub fn create_webhooks(&self, webhooks: WebhooksDumpView) -> Result<()> {
+        Ok(std::fs::write(
+            self.dir.path().join("webhooks.json"),
+            serde_json::to_string(&webhooks)?,
+        )?)
+    }
+
     pub fn persist_to(self, mut writer: impl Write) -> Result<()> {
         let gz_encoder = GzEncoder::new(&mut writer, Compression::default());
         let mut tar_encoder = tar::Builder::new(gz_encoder);
BIN crates/dump/tests/assets/v6-with-webhooks.dump Normal file (binary file not shown)
@@ -15,6 +15,7 @@ license.workspace = true
 nom = "7.1.3"
 nom_locate = "4.2.0"
 unescaper = "0.1.6"
+levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }

 [dev-dependencies]
 # fixed version due to format breakages in v1.40
@@ -7,11 +7,22 @@
 use nom::branch::alt;
 use nom::bytes::complete::tag;
+use nom::character::complete::char;
+use nom::character::complete::multispace0;
 use nom::character::complete::multispace1;
 use nom::combinator::cut;
+use nom::combinator::map;
+use nom::combinator::value;
+use nom::sequence::preceded;
 use nom::sequence::{terminated, tuple};
 use Condition::*;

+use crate::error::IResultExt;
+use crate::value::parse_vector_value;
+use crate::value::parse_vector_value_cut;
+use crate::Error;
+use crate::ErrorKind;
+use crate::VectorFilter;
 use crate::{parse_value, FilterCondition, IResult, Span, Token};

 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -113,6 +124,83 @@ pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
     Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
 }

+fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter<'_>)> {
+    let (input, _) = multispace0(input)?;
+    let (input, fid) = tag("_vectors")(input)?;
+
+    if let Ok((input, _)) = multispace1::<_, crate::Error>(input) {
+        return Ok((input, (Token::from(fid), None, VectorFilter::None)));
+    }
+
+    let (input, _) = char('.')(input)?;
+
+    // From this point, we are certain this is a vector filter, so our errors must be final.
+    // We could use nom's `cut` but it's better to be explicit about the errors
+
+    if let Ok((_, space)) = tag::<_, _, ()>(" ")(input) {
+        return Err(crate::Error::failure_from_kind(space, ErrorKind::VectorFilterMissingEmbedder));
+    }
+
+    let (input, embedder_name) =
+        parse_vector_value_cut(input, ErrorKind::VectorFilterInvalidEmbedder)?;
+
+    let (input, filter) = alt((
+        map(
+            preceded(tag(".fragments"), |input| {
+                let (input, _) = tag(".")(input).map_cut(ErrorKind::VectorFilterMissingFragment)?;
+                parse_vector_value_cut(input, ErrorKind::VectorFilterInvalidFragment)
+            }),
+            VectorFilter::Fragment,
+        ),
+        value(VectorFilter::UserProvided, tag(".userProvided")),
+        value(VectorFilter::DocumentTemplate, tag(".documentTemplate")),
+        value(VectorFilter::Regenerate, tag(".regenerate")),
+        value(VectorFilter::None, nom::combinator::success("")),
+    ))(input)?;
+
+    if let Ok((input, point)) = tag::<_, _, ()>(".")(input) {
+        let opt_value = parse_vector_value(input).ok().map(|(_, v)| v);
+        let value =
+            opt_value.as_ref().map(|v| v.value().to_owned()).unwrap_or_else(|| point.to_string());
+        let context = opt_value.map(|v| v.original_span()).unwrap_or(point);
+        let previous_kind = match filter {
+            VectorFilter::Fragment(_) => Some("fragments"),
+            VectorFilter::DocumentTemplate => Some("documentTemplate"),
+            VectorFilter::UserProvided => Some("userProvided"),
+            VectorFilter::Regenerate => Some("regenerate"),
+            VectorFilter::None => None,
+        };
+        return Err(Error::failure_from_kind(
+            context,
+            ErrorKind::VectorFilterUnknownSuffix(previous_kind, value),
+        ));
+    }
+
+    let (input, _) = multispace1(input).map_cut(ErrorKind::VectorFilterLeftover)?;
+
+    Ok((input, (Token::from(fid), Some(embedder_name), filter)))
+}
+
+/// vectors_exists = vectors ("EXISTS" | ("NOT" WS+ "EXISTS"))
+pub fn parse_vectors_exists(input: Span) -> IResult<FilterCondition> {
+    let (input, (fid, embedder, filter)) = parse_vectors(input)?;
+
+    // Try parsing "EXISTS" first
+    if let Ok((input, _)) = tag::<_, _, ()>("EXISTS")(input) {
+        return Ok((input, FilterCondition::VectorExists { fid, embedder, filter }));
+    }
+
+    // Try parsing "NOT EXISTS"
+    if let Ok((input, _)) = tuple::<_, _, (), _>((tag("NOT"), multispace1, tag("EXISTS")))(input) {
+        return Ok((
+            input,
+            FilterCondition::Not(Box::new(FilterCondition::VectorExists { fid, embedder, filter })),
+        ));
+    }
+
+    Err(crate::Error::failure_from_kind(input, ErrorKind::VectorFilterOperation))
+}
+
 /// contains = value "CONTAINS" value
 pub fn parse_contains(input: Span) -> IResult<FilterCondition> {
     let (input, (fid, contains, value)) =
@@ -42,6 +42,23 @@ pub fn cut_with_err<'a, O>(
|
||||
}
|
||||
}
|
||||
|
||||
pub trait IResultExt<'a> {
|
||||
fn map_cut(self, kind: ErrorKind<'a>) -> Self;
|
||||
}
|
||||
|
||||
impl<'a, T> IResultExt<'a> for IResult<'a, T> {
|
||||
fn map_cut(self, kind: ErrorKind<'a>) -> Self {
|
||||
self.map_err(move |e: nom::Err<Error<'a>>| {
|
||||
let input = match e {
|
||||
nom::Err::Incomplete(_) => return e,
|
||||
nom::Err::Error(e) => *e.context(),
|
||||
nom::Err::Failure(e) => *e.context(),
|
||||
};
|
||||
Error::failure_from_kind(input, kind)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
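`map_cut` packages that promotion for call sites: whatever error a parser produced is replaced by a final failure carrying the caller's chosen `ErrorKind` (the `IN [` opening bracket further down uses it exactly this way). A simplified, self-contained stand-in over plain nom types — unlike the real `map_cut`, it pins the error to the caller's input rather than to the failing sub-span, and it does not pass `Incomplete` through:

use nom::bytes::complete::tag;
use nom::error::{Error, ErrorKind};
use nom::IResult;

// Stand-in for `IResultExt::map_cut`: replace whatever error came out of a
// parser with a final `Failure` carrying the kind chosen at the call site.
fn map_cut<'a, T>(
    res: IResult<&'a str, T>,
    input: &'a str,
    kind: ErrorKind,
) -> IResult<&'a str, T> {
    res.map_err(|_| nom::Err::Failure(Error::new(input, kind)))
}

fn main() {
    // `IN` must be followed by `[`; on any other byte the error is final.
    let res = map_cut(tag::<_, _, Error<&str>>("[")("(1, 2)"), "(1, 2)", ErrorKind::Char);
    assert!(matches!(res, Err(nom::Err::Failure(_))));
}
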
#[derive(Debug)]
pub struct Error<'a> {
    context: Span<'a>,

@@ -61,6 +78,14 @@ pub enum ErrorKind<'a> {
    GeoBoundingBox,
    MisusedGeoRadius,
    MisusedGeoBoundingBox,
    VectorFilterLeftover,
    VectorFilterInvalidQuotes,
    VectorFilterMissingEmbedder,
    VectorFilterInvalidEmbedder,
    VectorFilterMissingFragment,
    VectorFilterInvalidFragment,
    VectorFilterUnknownSuffix(Option<&'static str>, String),
    VectorFilterOperation,
    InvalidPrimary,
    InvalidEscapedNumber,
    ExpectedEof,

@@ -91,6 +116,10 @@ impl<'a> Error<'a> {
        Self { context, kind }
    }

    pub fn failure_from_kind(context: Span<'a>, kind: ErrorKind<'a>) -> nom::Err<Self> {
        nom::Err::Failure(Self::new_from_kind(context, kind))
    }

    pub fn new_from_external(context: Span<'a>, error: impl std::error::Error) -> Self {
        Self::new_from_kind(context, ErrorKind::External(error.to_string()))
    }

@@ -128,6 +157,20 @@ impl Display for Error<'_> {
        // first line being the diagnostic and the second line being the incriminated filter.
        let escaped_input = input.escape_debug();

        fn key_suggestion<'a>(key: &str, keys: &[&'a str]) -> Option<&'a str> {
            let typos =
                levenshtein_automata::LevenshteinAutomatonBuilder::new(2, true).build_dfa(key);
            for key in keys.iter() {
                match typos.eval(key) {
                    levenshtein_automata::Distance::Exact(_) => {
                        return Some(key);
                    }
                    levenshtein_automata::Distance::AtLeast(_) => continue,
                }
            }
            None
        }

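`key_suggestion` is what turns the `fargments` typo in the tests further down into a `Did you mean `fragments`?` hint. A standalone sketch of the same `levenshtein_automata` calls (assuming that crate, as the code above already does):

use levenshtein_automata::{Distance, LevenshteinAutomatonBuilder};

fn main() {
    // Build a DFA that accepts anything within 2 edits of the (mistyped) key,
    // counting a transposition as a single edit.
    let dfa = LevenshteinAutomatonBuilder::new(2, true).build_dfa("fargments");
    match dfa.eval("fragments") {
        Distance::Exact(d) => println!("suggest `fragments` (distance {d})"),
        Distance::AtLeast(_) => println!("no suggestion"),
    }
}
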
        match &self.kind {
            ErrorKind::ExpectedValue(_) if input.trim().is_empty() => {
                writeln!(f, "Was expecting a value but instead got nothing.")?

@@ -169,6 +212,44 @@ impl Display for Error<'_> {
            ErrorKind::MisusedGeoBoundingBox => {
                writeln!(f, "The `_geoBoundingBox` filter is an operation and can't be used as a value.")?
            }
            ErrorKind::VectorFilterLeftover => {
                writeln!(f, "The vector filter has leftover tokens.")?
            }
            ErrorKind::VectorFilterUnknownSuffix(_, value) if value.as_str() == "." => {
                writeln!(f, "Was expecting one of `.fragments`, `.userProvided`, `.documentTemplate`, `.regenerate` or nothing, but instead found a point without a valid value.")?;
            }
            ErrorKind::VectorFilterUnknownSuffix(None, value) if ["fragments", "userProvided", "documentTemplate", "regenerate"].contains(&value.as_str()) => {
                // This will happen with "_vectors.rest.\"userProvided\"" for instance
                writeln!(f, "Was expecting this part to be unquoted.")?
            }
            ErrorKind::VectorFilterUnknownSuffix(None, value) => {
                if let Some(suggestion) = key_suggestion(value, &["fragments", "userProvided", "documentTemplate", "regenerate"]) {
                    writeln!(f, "Was expecting one of `fragments`, `userProvided`, `documentTemplate`, `regenerate` or nothing, but instead found `{value}`. Did you mean `{suggestion}`?")?;
                } else {
                    writeln!(f, "Was expecting one of `fragments`, `userProvided`, `documentTemplate`, `regenerate` or nothing, but instead found `{value}`.")?;
                }
            }
            ErrorKind::VectorFilterUnknownSuffix(Some(previous_filter_kind), value) => {
                writeln!(f, "Vector filter can only accept one of `fragments`, `userProvided`, `documentTemplate` or `regenerate`, but found both `{previous_filter_kind}` and `{value}`.")?
            },
            ErrorKind::VectorFilterInvalidFragment => {
                writeln!(f, "The vector filter's fragment name is invalid.")?
            }
            ErrorKind::VectorFilterMissingFragment => {
                writeln!(f, "The vector filter is missing a fragment name.")?
            }
            ErrorKind::VectorFilterMissingEmbedder => {
                writeln!(f, "Was expecting embedder name but found nothing.")?
            }
            ErrorKind::VectorFilterInvalidEmbedder => {
                writeln!(f, "The vector filter's embedder name is invalid.")?
            }
            ErrorKind::VectorFilterOperation => {
                writeln!(f, "Was expecting an operation like `EXISTS` or `NOT EXISTS` after the vector filter.")?
            }
            ErrorKind::VectorFilterInvalidQuotes => {
                writeln!(f, "The quotes in one of the values are inconsistent.")?
            }
            ErrorKind::ReservedKeyword(word) => {
                writeln!(f, "`{word}` is a reserved keyword and thus cannot be used as a field name unless it is put inside quotes. Use \"{word}\" or \'{word}\' instead.")?
            }

@@ -65,6 +65,9 @@ use nom_locate::LocatedSpan;
pub(crate) use value::parse_value;
use value::word_exact;

use crate::condition::parse_vectors_exists;
use crate::error::IResultExt;

pub type Span<'a> = LocatedSpan<&'a str, &'a str>;

type IResult<'a, Ret> = nom::IResult<Span<'a>, Ret, Error<'a>>;

@@ -136,6 +139,15 @@ impl<'a> From<&'a str> for Token<'a> {
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VectorFilter<'a> {
    Fragment(Token<'a>),
    DocumentTemplate,
    UserProvided,
    Regenerate,
    None,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FilterCondition<'a> {
    Not(Box<Self>),

@@ -143,6 +155,7 @@ pub enum FilterCondition<'a> {
    In { fid: Token<'a>, els: Vec<Token<'a>> },
    Or(Vec<Self>),
    And(Vec<Self>),
    VectorExists { fid: Token<'a>, embedder: Option<Token<'a>>, filter: VectorFilter<'a> },
    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> },
    GeoBoundingBox { top_right_point: [Token<'a>; 2], bottom_left_point: [Token<'a>; 2] },
}

@@ -165,17 +178,32 @@ impl<'a> FilterCondition<'a> {
                | Condition::Exists
                | Condition::LowerThan(_)
                | Condition::LowerThanOrEqual(_)
                | Condition::Between { .. } => None,
                Condition::Contains { keyword, word: _ }
                | Condition::StartsWith { keyword, word: _ } => Some(keyword),
                | Condition::Between { .. }
                | Condition::StartsWith { .. } => None,
                Condition::Contains { keyword, word: _ } => Some(keyword),
            },
            FilterCondition::Not(this) => this.use_contains_operator(),
            FilterCondition::Or(seq) | FilterCondition::And(seq) => {
                seq.iter().find_map(|filter| filter.use_contains_operator())
            }
            FilterCondition::VectorExists { .. }
            | FilterCondition::GeoLowerThan { .. }
            | FilterCondition::GeoBoundingBox { .. }
            | FilterCondition::In { .. } => None,
        }
    }

    pub fn use_vector_filter(&self) -> Option<&Token> {
        match self {
            FilterCondition::Condition { .. } => None,
            FilterCondition::Not(this) => this.use_vector_filter(),
            FilterCondition::Or(seq) | FilterCondition::And(seq) => {
                seq.iter().find_map(|filter| filter.use_vector_filter())
            }
            FilterCondition::GeoLowerThan { .. }
            | FilterCondition::GeoBoundingBox { .. }
            | FilterCondition::In { .. } => None,
            FilterCondition::VectorExists { fid, .. } => Some(fid),
        }
    }

@@ -263,10 +291,7 @@ fn parse_in_body(input: Span) -> IResult<Vec<Token>> {
    let (input, _) = ws(word_exact("IN"))(input)?;

    // everything after `IN` can be a failure
    let (input, _) =
        cut_with_err(tag("["), |_| Error::new_from_kind(input, ErrorKind::InOpeningBracket))(
            input,
        )?;
    let (input, _) = tag("[")(input).map_cut(ErrorKind::InOpeningBracket)?;

    let (input, content) = cut(parse_value_list)(input)?;

@@ -412,7 +437,7 @@ fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
    let (input, args) = parsed?;

    if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 {
        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
        return Err(Error::failure_from_kind(input, ErrorKind::GeoBoundingBox));
    }

    let res = FilterCondition::GeoBoundingBox {

@@ -433,7 +458,7 @@ fn parse_geo_point(input: Span) -> IResult<FilterCondition> {
    ))(input)
    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::ReservedGeo("_geoPoint"))))?;
    // if we succeeded we still return a `Failure` because geoPoints are not allowed
    Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::ReservedGeo("_geoPoint"))))
    Err(Error::failure_from_kind(input, ErrorKind::ReservedGeo("_geoPoint")))
}

/// geoPoint = WS* "_geoDistance(float WS* "," WS* float WS* "," WS* float)

@@ -447,7 +472,7 @@ fn parse_geo_distance(input: Span) -> IResult<FilterCondition> {
    ))(input)
    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::ReservedGeo("_geoDistance"))))?;
    // if we succeeded we still return a `Failure` because `geoDistance` filters are not allowed
    Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::ReservedGeo("_geoDistance"))))
    Err(Error::failure_from_kind(input, ErrorKind::ReservedGeo("_geoDistance")))
}

/// geo = WS* "_geo(float WS* "," WS* float WS* "," WS* float)

@@ -461,7 +486,7 @@ fn parse_geo(input: Span) -> IResult<FilterCondition> {
    ))(input)
    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::ReservedGeo("_geo"))))?;
    // if we succeeded we still return a `Failure` because `_geo` filter is not allowed
    Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::ReservedGeo("_geo"))))
    Err(Error::failure_from_kind(input, ErrorKind::ReservedGeo("_geo")))
}

fn parse_error_reserved_keyword(input: Span) -> IResult<FilterCondition> {

@@ -500,8 +525,7 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
        parse_is_not_null,
        parse_is_empty,
        parse_is_not_empty,
        parse_exists,
        parse_not_exists,
        alt((parse_vectors_exists, parse_exists, parse_not_exists)),
        parse_to,
        parse_contains,
        parse_not_contains,

@@ -557,6 +581,22 @@ impl std::fmt::Display for FilterCondition<'_> {
                }
                write!(f, "]")
            }
            FilterCondition::VectorExists { fid: _, embedder, filter: inner } => {
                write!(f, "_vectors")?;
                if let Some(embedder) = embedder {
                    write!(f, ".{:?}", embedder.value())?;
                }
                match inner {
                    VectorFilter::Fragment(fragment) => {
                        write!(f, ".fragments.{:?}", fragment.value())?
                    }
                    VectorFilter::DocumentTemplate => write!(f, ".documentTemplate")?,
                    VectorFilter::UserProvided => write!(f, ".userProvided")?,
                    VectorFilter::Regenerate => write!(f, ".regenerate")?,
                    VectorFilter::None => (),
                }
                write!(f, " EXISTS")
            }
            FilterCondition::GeoLowerThan { point, radius } => {
                write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
            }

@@ -630,6 +670,9 @@ pub mod tests {
        insta::assert_snapshot!(p(r"title = 'foo\\\\\\\\'"), @r#"{title} = {foo\\\\}"#);
        // but it also works with other sequences
        insta::assert_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");

        insta::assert_snapshot!(p(r#"_vectors." valid.name ".fragments."also.. valid! " EXISTS"#), @r#"_vectors." valid.name ".fragments."also.. valid! " EXISTS"#);
        insta::assert_snapshot!(p("_vectors.\"\n\t\r\\\"\" EXISTS"), @r#"_vectors."\n\t\r\"" EXISTS"#);
    }

    #[test]

@@ -692,6 +735,18 @@ pub mod tests {
        insta::assert_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY");
        insta::assert_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");

        // Test _vectors EXISTS + _vectors NOT EXISTS
        insta::assert_snapshot!(p("_vectors EXISTS"), @"_vectors EXISTS");
        insta::assert_snapshot!(p("_vectors.embedderName EXISTS"), @r#"_vectors."embedderName" EXISTS"#);
        insta::assert_snapshot!(p("_vectors.embedderName.documentTemplate EXISTS"), @r#"_vectors."embedderName".documentTemplate EXISTS"#);
        insta::assert_snapshot!(p("_vectors.embedderName.regenerate EXISTS"), @r#"_vectors."embedderName".regenerate EXISTS"#);
        insta::assert_snapshot!(p("_vectors.embedderName.fragments.fragmentName EXISTS"), @r#"_vectors."embedderName".fragments."fragmentName" EXISTS"#);
        insta::assert_snapshot!(p(" _vectors.embedderName.fragments.fragmentName EXISTS"), @r#"_vectors."embedderName".fragments."fragmentName" EXISTS"#);
        insta::assert_snapshot!(p("NOT _vectors EXISTS"), @"NOT (_vectors EXISTS)");
        insta::assert_snapshot!(p(" NOT _vectors EXISTS"), @"NOT (_vectors EXISTS)");
        insta::assert_snapshot!(p(" _vectors NOT EXISTS"), @"NOT (_vectors EXISTS)");

        // Test EXISTS + NOT EXISTS
        insta::assert_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
        insta::assert_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)");
@@ -946,6 +1001,71 @@ pub mod tests {
        "###
        );

        insta::assert_snapshot!(p(r#"_vectors _vectors EXISTS"#), @r"
        Was expecting an operation like `EXISTS` or `NOT EXISTS` after the vector filter.
        10:25 _vectors _vectors EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors. embedderName EXISTS"#), @r"
        Was expecting embedder name but found nothing.
        10:11 _vectors. embedderName EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors .embedderName EXISTS"#), @r"
        Was expecting an operation like `EXISTS` or `NOT EXISTS` after the vector filter.
        10:30 _vectors .embedderName EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName. EXISTS"#), @r"
        Was expecting one of `.fragments`, `.userProvided`, `.documentTemplate`, `.regenerate` or nothing, but instead found a point without a valid value.
        22:23 _vectors.embedderName. EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors."embedderName EXISTS"#), @r#"
        The quotes in one of the values are inconsistent.
        10:30 _vectors."embedderName EXISTS
        "#);
        insta::assert_snapshot!(p(r#"_vectors."embedderNam"e EXISTS"#), @r#"
        The vector filter has leftover tokens.
        23:31 _vectors."embedderNam"e EXISTS
        "#);
        insta::assert_snapshot!(p(r#"_vectors.embedderName.documentTemplate. EXISTS"#), @r"
        Was expecting one of `.fragments`, `.userProvided`, `.documentTemplate`, `.regenerate` or nothing, but instead found a point without a valid value.
        39:40 _vectors.embedderName.documentTemplate. EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName.fragments EXISTS"#), @r"
        The vector filter is missing a fragment name.
        32:39 _vectors.embedderName.fragments EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName.fragments. EXISTS"#), @r"
        The vector filter's fragment name is invalid.
        33:40 _vectors.embedderName.fragments. EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName.fragments.test test EXISTS"#), @r"
        Was expecting an operation like `EXISTS` or `NOT EXISTS` after the vector filter.
        38:49 _vectors.embedderName.fragments.test test EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName.fragments. test EXISTS"#), @r"
        The vector filter's fragment name is invalid.
        33:45 _vectors.embedderName.fragments. test EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName .fragments. test EXISTS"#), @r"
        Was expecting an operation like `EXISTS` or `NOT EXISTS` after the vector filter.
        23:46 _vectors.embedderName .fragments. test EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName .fragments.test EXISTS"#), @r"
        Was expecting an operation like `EXISTS` or `NOT EXISTS` after the vector filter.
        23:45 _vectors.embedderName .fragments.test EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName.fargments.test EXISTS"#), @r"
        Was expecting one of `fragments`, `userProvided`, `documentTemplate`, `regenerate` or nothing, but instead found `fargments`. Did you mean `fragments`?
        23:32 _vectors.embedderName.fargments.test EXISTS
        ");
        insta::assert_snapshot!(p(r#"_vectors.embedderName."userProvided" EXISTS"#), @r#"
        Was expecting this part to be unquoted.
        24:36 _vectors.embedderName."userProvided" EXISTS
        "#);
        insta::assert_snapshot!(p(r#"_vectors.embedderName.userProvided.fragments.test EXISTS"#), @r"
        Vector filter can only accept one of `fragments`, `userProvided`, `documentTemplate` or `regenerate`, but found both `userProvided` and `fragments`.
        36:45 _vectors.embedderName.userProvided.fragments.test EXISTS
        ");

        insta::assert_snapshot!(p(r#"NOT OR EXISTS AND EXISTS NOT EXISTS"#), @r###"
        Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
        5:7 NOT OR EXISTS AND EXISTS NOT EXISTS

@@ -80,6 +80,51 @@ pub fn word_exact<'a, 'b: 'a>(tag: &'b str) -> impl Fn(Span<'a>) -> IResult<'a,
    }
}

/// vector_value = ( non_dot_word | singleQuoted | doubleQuoted)
pub fn parse_vector_value(input: Span) -> IResult<Token> {
    pub fn non_dot_word(input: Span) -> IResult<Token> {
        let (input, word) = take_while1(|c| is_value_component(c) && c != '.')(input)?;
        Ok((input, word.into()))
    }

    let (input, value) = alt((
        delimited(char('\''), cut(|input| quoted_by('\'', input)), cut(char('\''))),
        delimited(char('"'), cut(|input| quoted_by('"', input)), cut(char('"'))),
        non_dot_word,
    ))(input)?;

    match unescaper::unescape(value.value()) {
        Ok(content) => {
            if content.len() != value.value().len() {
                Ok((input, Token::new(value.original_span(), Some(content))))
            } else {
                Ok((input, value))
            }
        }
        Err(unescaper::Error::IncompleteStr(_)) => Err(nom::Err::Incomplete(nom::Needed::Unknown)),
        Err(unescaper::Error::ParseIntError { .. }) => Err(nom::Err::Error(Error::new_from_kind(
            value.original_span(),
            ErrorKind::InvalidEscapedNumber,
        ))),
        Err(unescaper::Error::InvalidChar { .. }) => Err(nom::Err::Error(Error::new_from_kind(
            value.original_span(),
            ErrorKind::MalformedValue,
        ))),
    }
}

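`parse_vector_value` only allocates a new `Token` when unescaping actually changed something, and it detects that by comparing lengths. A quick illustration of the `unescaper` behavior it relies on (same crate as above):

fn main() {
    // Escape sequences are resolved: the output is shorter than the input,
    // so the parser would build a new Token carrying the unescaped content.
    assert_eq!(unescaper::unescape(r"foo\n").unwrap(), "foo\n");
    // Nothing to unescape: input and output have the same length,
    // and the original token is returned untouched.
    assert_eq!(unescaper::unescape("foo").unwrap(), "foo");
}
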
pub fn parse_vector_value_cut<'a>(input: Span<'a>, kind: ErrorKind<'a>) -> IResult<'a, Token<'a>> {
    parse_vector_value(input).map_err(|e| match e {
        nom::Err::Failure(e) => match e.kind() {
            ErrorKind::Char(c) if *c == '"' || *c == '\'' => {
                crate::Error::failure_from_kind(input, ErrorKind::VectorFilterInvalidQuotes)
            }
            _ => crate::Error::failure_from_kind(input, kind),
        },
        _ => crate::Error::failure_from_kind(input, kind),
    })
}

/// value = WS* ( word | singleQuoted | doubleQuoted) WS+
pub fn parse_value(input: Span) -> IResult<Token> {
    // to get better diagnostic message we are going to strip the left whitespaces from the input right now

@@ -99,31 +144,21 @@ pub fn parse_value(input: Span) -> IResult<Token> {
    }

    match parse_geo_radius(input) {
        Ok(_) => {
            return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius)))
        }
        Ok(_) => return Err(Error::failure_from_kind(input, ErrorKind::MisusedGeoRadius)),
        // if we encountered a failure it means the user badly wrote a _geoRadius filter.
        // But instead of showing them how to fix their syntax we are going to tell them they should not use this filter as a value.
        Err(e) if e.is_failure() => {
            return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius)))
            return Err(Error::failure_from_kind(input, ErrorKind::MisusedGeoRadius))
        }
        _ => (),
    }

    match parse_geo_bounding_box(input) {
        Ok(_) => {
            return Err(nom::Err::Failure(Error::new_from_kind(
                input,
                ErrorKind::MisusedGeoBoundingBox,
            )))
        }
        Ok(_) => return Err(Error::failure_from_kind(input, ErrorKind::MisusedGeoBoundingBox)),
        // if we encountered a failure it means the user badly wrote a _geoBoundingBox filter.
        // But instead of showing them how to fix their syntax we are going to tell them they should not use this filter as a value.
        Err(e) if e.is_failure() => {
            return Err(nom::Err::Failure(Error::new_from_kind(
                input,
                ErrorKind::MisusedGeoBoundingBox,
            )))
            return Err(Error::failure_from_kind(input, ErrorKind::MisusedGeoBoundingBox))
        }
        _ => (),
    }

@@ -85,7 +85,7 @@ impl RoFeatures {
            Ok(())
        } else {
            Err(FeatureNotEnabledError {
                disabled_action: "Using `CONTAINS` or `STARTS WITH` in a filter",
                disabled_action: "Using `CONTAINS` in a filter",
                feature: "contains filter",
                issue_link: "https://github.com/orgs/meilisearch/discussions/763",
            }

@@ -182,6 +182,7 @@ impl FeatureData {
            ..persisted_features
        }));

        // Once this is stabilized, network should be stored along with webhooks in index-scheduler's persisted database
        let network_db = runtime_features_db.remap_data_type::<SerdeJson<Network>>();
        let network: Network = network_db.get(wtxn, db_keys::NETWORK)?.unwrap_or_default();

@@ -71,7 +71,7 @@ pub struct IndexMapper {
    /// Path to the folder where the LMDB environments of each index are.
    base_path: PathBuf,
    /// The map size an index is opened with on the first time.
    index_base_map_size: usize,
    pub(crate) index_base_map_size: usize,
    /// The quantity by which the map size of an index is incremented upon reopening, in bytes.
    index_growth_amount: usize,
    /// Whether we open a meilisearch index with the MDB_WRITEMAP option or not.

@@ -26,11 +26,11 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
        version,
        queue,
        scheduler,
        persisted,

        index_mapper,
        features: _,
        webhook_url: _,
        webhook_authorization_header: _,
        webhooks: _,
        test_breakpoint_sdr: _,
        planned_failures: _,
        run_loop_iteration: _,

@@ -62,6 +62,13 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
    }
    snap.push_str("\n----------------------------------------------------------------------\n");

    let persisted_db_snapshot = snapshot_persisted_db(&rtxn, persisted);
    if !persisted_db_snapshot.is_empty() {
        snap.push_str("### Persisted:\n");
        snap.push_str(&persisted_db_snapshot);
        snap.push_str("----------------------------------------------------------------------\n");
    }

    snap.push_str("### All Tasks:\n");
    snap.push_str(&snapshot_all_tasks(&rtxn, queue.tasks.all_tasks));
    snap.push_str("----------------------------------------------------------------------\n");

@@ -200,6 +207,16 @@ pub fn snapshot_date_db(rtxn: &RoTxn, db: Database<BEI128, CboRoaringBitmapCodec
    snap
}

pub fn snapshot_persisted_db(rtxn: &RoTxn, db: &Database<Str, Str>) -> String {
    let mut snap = String::new();
    let iter = db.iter(rtxn).unwrap();
    for next in iter {
        let (key, value) = next.unwrap();
        snap.push_str(&format!("{key}: {value}\n"));
    }
    snap
}

pub fn snapshot_task(task: &Task) -> String {
    let mut snap = String::new();
    let Task {

@@ -311,6 +328,7 @@ pub fn snapshot_status(
    }
    snap
}

pub fn snapshot_kind(rtxn: &RoTxn, db: Database<SerdeBincode<Kind>, RoaringBitmapCodec>) -> String {
    let mut snap = String::new();
    let iter = db.iter(rtxn).unwrap();

@@ -331,6 +349,7 @@ pub fn snapshot_index_tasks(rtxn: &RoTxn, db: Database<Str, RoaringBitmapCodec>)
    }
    snap
}

pub fn snapshot_canceled_by(rtxn: &RoTxn, db: Database<BEU32, RoaringBitmapCodec>) -> String {
    let mut snap = String::new();
    let iter = db.iter(rtxn).unwrap();

@@ -65,13 +65,16 @@ use meilisearch_types::milli::vector::{
use meilisearch_types::milli::{self, Index};
use meilisearch_types::task_view::TaskView;
use meilisearch_types::tasks::{KindWithContent, Task};
use meilisearch_types::webhooks::{Webhook, WebhooksDumpView, WebhooksView};
use milli::vector::db::IndexEmbeddingConfig;
use processing::ProcessingTasks;
pub use queue::Query;
use queue::Queue;
use roaring::RoaringBitmap;
use scheduler::Scheduler;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use versioning::Versioning;

use crate::index_mapper::IndexMapper;

@@ -80,7 +83,15 @@ use crate::utils::clamp_to_page_size;
pub(crate) type BEI128 = I128<BE>;

const TASK_SCHEDULER_SIZE_THRESHOLD_PERCENT_INT: u64 = 40;
const CHAT_SETTINGS_DB_NAME: &str = "chat-settings";

mod db_name {
    pub const CHAT_SETTINGS: &str = "chat-settings";
    pub const PERSISTED: &str = "persisted";
}

mod db_keys {
    pub const WEBHOOKS: &str = "webhooks";
}

#[derive(Debug)]
pub struct IndexSchedulerOptions {

@@ -98,10 +109,10 @@ pub struct IndexSchedulerOptions {
    pub snapshots_path: PathBuf,
    /// The path to the folder containing the dumps.
    pub dumps_path: PathBuf,
    /// The URL on which we must send the tasks statuses
    pub webhook_url: Option<String>,
    /// The value we will send into the Authorization HTTP header on the webhook URL
    pub webhook_authorization_header: Option<String>,
    /// The webhook url that was set by the CLI.
    pub cli_webhook_url: Option<String>,
    /// The Authorization header to send to the webhook URL that was set by the CLI.
    pub cli_webhook_authorization: Option<String>,
    /// The maximum size, in bytes, of the task index.
    pub task_db_size: usize,
    /// The size, in bytes, with which a meilisearch index is opened the first time of each meilisearch index.

@@ -171,10 +182,11 @@ pub struct IndexScheduler {
    /// Whether we should use the old document indexer or the new one.
    pub(crate) experimental_no_edition_2024_for_dumps: bool,

    /// The webhook url we should send tasks to after processing every batch.
    pub(crate) webhook_url: Option<String>,
    /// The Authorization header to send to the webhook URL.
    pub(crate) webhook_authorization_header: Option<String>,
    /// A database to store single-keyed data that is persisted across restarts.
    persisted: Database<Str, Str>,

    /// Webhook, loaded and stored in the `persisted` database
    webhooks: Arc<Webhooks>,

    /// A map to retrieve the runtime representation of an embedder depending on its configuration.
    ///

@@ -214,8 +226,9 @@ impl IndexScheduler {
            index_mapper: self.index_mapper.clone(),
            cleanup_enabled: self.cleanup_enabled,
            experimental_no_edition_2024_for_dumps: self.experimental_no_edition_2024_for_dumps,
            webhook_url: self.webhook_url.clone(),
            webhook_authorization_header: self.webhook_authorization_header.clone(),
            persisted: self.persisted,

            webhooks: self.webhooks.clone(),
            embedders: self.embedders.clone(),
            #[cfg(test)]
            test_breakpoint_sdr: self.test_breakpoint_sdr.clone(),

@@ -234,6 +247,7 @@ impl IndexScheduler {
            + IndexMapper::nb_db()
            + features::FeatureData::nb_db()
            + 1 // chat-prompts
            + 1 // persisted
    }

    /// Create an index scheduler and start its run loop.

@@ -284,10 +298,18 @@ impl IndexScheduler {
        let version = versioning::Versioning::new(&env, from_db_version)?;

        let mut wtxn = env.write_txn()?;

        let features = features::FeatureData::new(&env, &mut wtxn, options.instance_features)?;
        let queue = Queue::new(&env, &mut wtxn, &options)?;
        let index_mapper = IndexMapper::new(&env, &mut wtxn, &options, budget)?;
        let chat_settings = env.create_database(&mut wtxn, Some(CHAT_SETTINGS_DB_NAME))?;
        let chat_settings = env.create_database(&mut wtxn, Some(db_name::CHAT_SETTINGS))?;

        let persisted = env.create_database(&mut wtxn, Some(db_name::PERSISTED))?;
        let webhooks_db = persisted.remap_data_type::<SerdeJson<Webhooks>>();
        let mut webhooks = webhooks_db.get(&wtxn, db_keys::WEBHOOKS)?.unwrap_or_default();
        webhooks
            .with_cli(options.cli_webhook_url.clone(), options.cli_webhook_authorization.clone());

        wtxn.commit()?;

        // allow unreachable_code to get rid of the warning in the case of a test build.
@@ -303,8 +325,8 @@ impl IndexScheduler {
            experimental_no_edition_2024_for_dumps: options
                .indexer_config
                .experimental_no_edition_2024_for_dumps,
            webhook_url: options.webhook_url,
            webhook_authorization_header: options.webhook_authorization_header,
            persisted,
            webhooks: Arc::new(webhooks),
            embedders: Default::default(),

            #[cfg(test)]

@@ -752,86 +774,92 @@ impl IndexScheduler {
        Ok(())
    }

    /// Once the task changes have been committed we must send all the tasks that were updated to our webhook if there is one.
    fn notify_webhook(&self, updated: &RoaringBitmap) -> Result<()> {
        if let Some(ref url) = self.webhook_url {
            struct TaskReader<'a, 'b> {
                rtxn: &'a RoTxn<'a>,
                index_scheduler: &'a IndexScheduler,
                tasks: &'b mut roaring::bitmap::Iter<'b>,
                buffer: Vec<u8>,
                written: usize,
            }
    /// Once the task changes have been committed we must send all the tasks that were updated to our webhooks
    fn notify_webhooks(&self, updated: RoaringBitmap) {
        struct TaskReader<'a, 'b> {
            rtxn: &'a RoTxn<'a>,
            index_scheduler: &'a IndexScheduler,
            tasks: &'b mut roaring::bitmap::Iter<'b>,
            buffer: Vec<u8>,
            written: usize,
        }

            impl Read for TaskReader<'_, '_> {
                fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
                    if self.buffer.is_empty() {
                        match self.tasks.next() {
                            None => return Ok(0),
                            Some(task_id) => {
                                let task = self
                                    .index_scheduler
                                    .queue
                                    .tasks
                                    .get_task(self.rtxn, task_id)
                                    .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?
                                    .ok_or_else(|| {
                                        io::Error::new(
                                            io::ErrorKind::Other,
                                            Error::CorruptedTaskQueue,
                                        )
                                    })?;
        impl Read for TaskReader<'_, '_> {
            fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
                if self.buffer.is_empty() {
                    match self.tasks.next() {
                        None => return Ok(0),
                        Some(task_id) => {
                            let task = self
                                .index_scheduler
                                .queue
                                .tasks
                                .get_task(self.rtxn, task_id)
                                .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?
                                .ok_or_else(|| {
                                    io::Error::new(io::ErrorKind::Other, Error::CorruptedTaskQueue)
                                })?;

                                serde_json::to_writer(
                                    &mut self.buffer,
                                    &TaskView::from_task(&task),
                                )?;
                                self.buffer.push(b'\n');
                            }
                            serde_json::to_writer(&mut self.buffer, &TaskView::from_task(&task))?;
                            self.buffer.push(b'\n');
                        }
                    }

                    let mut to_write = &self.buffer[self.written..];
                    let wrote = io::copy(&mut to_write, &mut buf)?;
                    self.written += wrote as usize;

                    // we wrote everything and must refresh our buffer on the next call
                    if self.written == self.buffer.len() {
                        self.written = 0;
                        self.buffer.clear();
                    }

                    Ok(wrote as usize)
                }
            }

            let rtxn = self.env.read_txn()?;
                let mut to_write = &self.buffer[self.written..];
                let wrote = io::copy(&mut to_write, &mut buf)?;
                self.written += wrote as usize;

            let task_reader = TaskReader {
                rtxn: &rtxn,
                index_scheduler: self,
                tasks: &mut updated.into_iter(),
                buffer: Vec::with_capacity(50), // on average a task is around ~100 bytes
                written: 0,
            };
                // we wrote everything and must refresh our buffer on the next call
                if self.written == self.buffer.len() {
                    self.written = 0;
                    self.buffer.clear();
                }

            // let reader = GzEncoder::new(BufReader::new(task_reader), Compression::default());
            let reader = GzEncoder::new(BufReader::new(task_reader), Compression::default());
            let request = ureq::post(url)
                .timeout(Duration::from_secs(30))
                .set("Content-Encoding", "gzip")
                .set("Content-Type", "application/x-ndjson");
            let request = match &self.webhook_authorization_header {
                Some(header) => request.set("Authorization", header),
                None => request,
            };

            if let Err(e) = request.send(reader) {
                tracing::error!("While sending data to the webhook: {e}");
                Ok(wrote as usize)
            }
        }

        Ok(())
        let webhooks = self.webhooks.get_all();
        if webhooks.is_empty() {
            return;
        }
        let this = self.private_clone();
        // We must take the RoTxn before entering the thread::spawn otherwise another batch may be
        // processed before we had the time to take our txn.
        let rtxn = match self.env.clone().static_read_txn() {
            Ok(rtxn) => rtxn,
            Err(e) => {
                tracing::error!("Couldn't get an rtxn to notify the webhook: {e}");
                return;
            }
        };

        std::thread::spawn(move || {
            for (uuid, Webhook { url, headers }) in webhooks.iter() {
                let task_reader = TaskReader {
                    rtxn: &rtxn,
                    index_scheduler: &this,
                    tasks: &mut updated.iter(),
                    buffer: Vec::with_capacity(page_size::get()),
                    written: 0,
                };

                let reader = GzEncoder::new(BufReader::new(task_reader), Compression::default());

                let mut request = ureq::post(url)
                    .timeout(Duration::from_secs(30))
                    .set("Content-Encoding", "gzip")
                    .set("Content-Type", "application/x-ndjson");
                for (header_name, header_value) in headers.iter() {
                    request = request.set(header_name, header_value);
                }

                if let Err(e) = request.send(reader) {
                    tracing::error!("While sending data to the webhook {uuid}: {e}");
                }
            }
        });
    }

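On the wire, each notification is a gzip-compressed stream of newline-delimited JSON task views, produced lazily by `TaskReader` rather than buffered whole. A reduced sketch of the same request shape, assuming `ureq` 2.x and `flate2` as used above; the URL and payload below are placeholders:

use std::io::BufReader;
use std::time::Duration;

use flate2::read::GzEncoder;
use flate2::Compression;

fn main() -> Result<(), Box<ureq::Error>> {
    // Any `Read` works here; the scheduler plugs in its lazy `TaskReader`.
    let body = r#"{"uid":0,"status":"succeeded","type":"indexCreation"}"#.to_string() + "\n";
    let reader = GzEncoder::new(BufReader::new(body.as_bytes()), Compression::default());

    ureq::post("http://localhost:9999/webhook") // placeholder URL
        .timeout(Duration::from_secs(30))
        .set("Content-Encoding", "gzip")
        .set("Content-Type", "application/x-ndjson")
        .send(reader)?;
    Ok(())
}
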
    pub fn index_stats(&self, index_uid: &str) -> Result<IndexStats> {

@@ -862,6 +890,29 @@ impl IndexScheduler {
        self.features.network()
    }

    pub fn update_runtime_webhooks(&self, runtime: RuntimeWebhooks) -> Result<()> {
        let webhooks = Webhooks::from_runtime(runtime);
        let mut wtxn = self.env.write_txn()?;
        let webhooks_db = self.persisted.remap_data_type::<SerdeJson<Webhooks>>();
        webhooks_db.put(&mut wtxn, db_keys::WEBHOOKS, &webhooks)?;
        wtxn.commit()?;
        self.webhooks.update_runtime(webhooks.into_runtime());
        Ok(())
    }

    pub fn webhooks_dump_view(&self) -> WebhooksDumpView {
        // We must not dump the cli api key
        WebhooksDumpView { webhooks: self.webhooks.get_runtime() }
    }

    pub fn webhooks_view(&self) -> WebhooksView {
        WebhooksView { webhooks: self.webhooks.get_all() }
    }

    pub fn retrieve_runtime_webhooks(&self) -> RuntimeWebhooks {
        self.webhooks.get_runtime()
    }

    pub fn embedders(
        &self,
        index_uid: String,

@@ -990,3 +1041,72 @@ pub struct IndexStats {
    /// Internal stats computed from the index.
    pub inner_stats: index_mapper::IndexStats,
}

/// These structures are not meant to be exposed to the end user; if needed, use the meilisearch-types::webhooks structures instead.
/// /!\ Every time you deserialize this structure you should fill the cli_webhook later on with the `with_cli` method. /!\
#[derive(Debug, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
struct Webhooks {
    // The cli webhook should *never* be stored in a database.
    // It represents a state that only exists for this execution of meilisearch
    #[serde(skip)]
    pub cli: Option<CliWebhook>,

    #[serde(default)]
    pub runtime: RwLock<RuntimeWebhooks>,
}

type RuntimeWebhooks = BTreeMap<Uuid, Webhook>;

impl Webhooks {
    pub fn with_cli(&mut self, url: Option<String>, auth: Option<String>) {
        if let Some(url) = url {
            let webhook = CliWebhook { url, auth };
            self.cli = Some(webhook);
        }
    }

    pub fn from_runtime(webhooks: RuntimeWebhooks) -> Self {
        Self { cli: None, runtime: RwLock::new(webhooks) }
    }

    pub fn into_runtime(self) -> RuntimeWebhooks {
        // safe because we own self and it cannot be cloned
        self.runtime.into_inner().unwrap()
    }

    pub fn update_runtime(&self, webhooks: RuntimeWebhooks) {
        *self.runtime.write().unwrap() = webhooks;
    }

    /// Returns all the webhooks in a unified view. The cli webhook is represented with a uuid set to 0
    pub fn get_all(&self) -> BTreeMap<Uuid, Webhook> {
        self.cli
            .as_ref()
            .map(|wh| (Uuid::nil(), Webhook::from(wh)))
            .into_iter()
            .chain(self.runtime.read().unwrap().iter().map(|(uuid, wh)| (*uuid, wh.clone())))
            .collect()
    }

    /// Returns all the runtime webhooks.
    pub fn get_runtime(&self) -> BTreeMap<Uuid, Webhook> {
        self.runtime.read().unwrap().iter().map(|(uuid, wh)| (*uuid, wh.clone())).collect()
    }
}
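The CLI webhook therefore surfaces in the merged view under the nil UUID, without ever being persisted. A tiny standalone illustration of that keying convention (assuming the `uuid` crate with the `v4` feature; the URLs are placeholders):

use std::collections::BTreeMap;
use uuid::Uuid;

fn main() {
    let mut all: BTreeMap<Uuid, &str> = BTreeMap::new();
    // The CLI webhook is keyed by the nil UUID (all zeros)...
    all.insert(Uuid::nil(), "https://cli.example/webhook");
    // ...while runtime webhooks keep their own UUIDs.
    all.insert(Uuid::new_v4(), "https://runtime.example/webhook");

    assert_eq!(all.keys().next(), Some(&Uuid::nil())); // nil sorts first
}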

#[derive(Debug, Serialize, Deserialize, Default, Clone, PartialEq)]
struct CliWebhook {
    pub url: String,
    pub auth: Option<String>,
}

impl From<&CliWebhook> for Webhook {
    fn from(webhook: &CliWebhook) -> Self {
        let mut headers = BTreeMap::new();
        if let Some(ref auth) = webhook.auth {
            headers.insert("Authorization".to_string(), auth.to_string());
        }
        Self { url: webhook.url.to_string(), headers }
    }
}

@@ -108,6 +108,7 @@ make_enum_progress! {
        DumpTheBatches,
        DumpTheIndexes,
        DumpTheExperimentalFeatures,
        DumpTheWebhooks,
        CompressTheDump,
    }
}

@@ -446,8 +446,7 @@ impl IndexScheduler {
            Ok(())
        })?;

        // We shouldn't crash the tick function if we can't send data to the webhook.
        let _ = self.notify_webhook(&ids);
        self.notify_webhooks(ids);

        #[cfg(test)]
        self.breakpoint(crate::test_utils::Breakpoint::AfterProcessing);

@@ -270,6 +270,11 @@ impl IndexScheduler {
        let network = self.network();
        dump.create_network(network)?;

        // 7. Dump the webhooks
        progress.update_progress(DumpCreationProgress::DumpTheWebhooks);
        let webhooks = self.webhooks_dump_view();
        dump.create_webhooks(webhooks)?;

        let dump_uid = started_at.format(format_description!(
            "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
        )).unwrap();

@@ -7,9 +7,73 @@ use meilisearch_types::milli::progress::{Progress, VariableNameStep};
use meilisearch_types::tasks::{Status, Task};
use meilisearch_types::{compression, VERSION_FILE_NAME};

use crate::heed::EnvOpenOptions;
use crate::processing::{AtomicUpdateFileStep, SnapshotCreationProgress};
use crate::queue::TaskQueue;
use crate::{Error, IndexScheduler, Result};

/// # Safety
///
/// See [`EnvOpenOptions::open`].
unsafe fn remove_tasks(
    tasks: &[Task],
    dst: &std::path::Path,
    index_base_map_size: usize,
) -> Result<()> {
    let env_options = EnvOpenOptions::new();
    let mut env_options = env_options.read_txn_without_tls();
    let env = env_options.max_dbs(TaskQueue::nb_db()).map_size(index_base_map_size).open(dst)?;
    let mut wtxn = env.write_txn()?;
    let task_queue = TaskQueue::new(&env, &mut wtxn)?;

    // Destructuring to ensure the code below gets updated if a database gets added in the future.
    let TaskQueue {
        all_tasks,
        status,
        kind,
        index_tasks: _, // snapshot creation tasks are not index tasks
        canceled_by,
        enqueued_at,
        started_at,
        finished_at,
    } = task_queue;

    for task in tasks {
        all_tasks.delete(&mut wtxn, &task.uid)?;

        let mut tasks = status.get(&wtxn, &task.status)?.unwrap_or_default();
        tasks.remove(task.uid);
        status.put(&mut wtxn, &task.status, &tasks)?;

        let mut tasks = kind.get(&wtxn, &task.kind.as_kind())?.unwrap_or_default();
        tasks.remove(task.uid);
        kind.put(&mut wtxn, &task.kind.as_kind(), &tasks)?;

        canceled_by.delete(&mut wtxn, &task.uid)?;

        let timestamp = task.enqueued_at.unix_timestamp_nanos();
        let mut tasks = enqueued_at.get(&wtxn, &timestamp)?.unwrap_or_default();
        tasks.remove(task.uid);
        enqueued_at.put(&mut wtxn, &timestamp, &tasks)?;

        if let Some(task_started_at) = task.started_at {
            let timestamp = task_started_at.unix_timestamp_nanos();
            let mut tasks = started_at.get(&wtxn, &timestamp)?.unwrap_or_default();
            tasks.remove(task.uid);
            started_at.put(&mut wtxn, &timestamp, &tasks)?;
        }

        if let Some(task_finished_at) = task.finished_at {
            let timestamp = task_finished_at.unix_timestamp_nanos();
            let mut tasks = finished_at.get(&wtxn, &timestamp)?.unwrap_or_default();
            tasks.remove(task.uid);
            finished_at.put(&mut wtxn, &timestamp, &tasks)?;
        }
    }
    wtxn.commit()?;
    Ok(())
}

impl IndexScheduler {
    pub(super) fn process_snapshot(
        &self,

@@ -48,14 +112,26 @@ impl IndexScheduler {
        };
        self.env.copy_to_path(dst.join("data.mdb"), compaction_option)?;

        // 2.2 Create a read transaction on the index-scheduler
        // 2.2 Remove the current snapshot tasks
        //
        // This is done to ensure that the tasks are not processed again when the snapshot is imported
        //
        // # Safety
        //
        // This is safe because we open the env file we just created in a temporary directory.
        // We are sure it's not being used by any other process nor thread.
        unsafe {
            remove_tasks(&tasks, &dst, self.index_mapper.index_base_map_size)?;
        }

        // 2.3 Create a read transaction on the index-scheduler
        let rtxn = self.env.read_txn()?;

        // 2.3 Create the update files directory
        // 2.4 Create the update files directory
        let update_files_dir = temp_snapshot_dir.path().join("update_files");
        fs::create_dir_all(&update_files_dir)?;

        // 2.4 Only copy the update files of the enqueued tasks
        // 2.5 Only copy the update files of the enqueued tasks
        progress.update_progress(SnapshotCreationProgress::SnapshotTheUpdateFiles);
        let enqueued = self.queue.tasks.get_status(&rtxn, Status::Enqueued)?;
        let (atomic, update_file_progress) = AtomicUpdateFileStep::new(enqueued.len() as u32);

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 17, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}

@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.17.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 17, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 17, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 17, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:

@@ -37,7 +37,7 @@ catto [1,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.17.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 17, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
----------------------------------------------------------------------

@@ -40,7 +40,7 @@ doggo [2,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.17.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 17, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}

@@ -43,7 +43,7 @@ doggo [2,3,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.17.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

@@ -736,7 +736,7 @@ fn test_document_addition_mixed_rights_with_index() {
#[test]
fn test_document_addition_mixed_right_without_index_starts_with_cant_create() {
    // We're going to autobatch multiple document additions.
    // - The index does not exists
    // - The index does not exist
    // - The first document addition doesn't have the right to create an index
    // - The second does. They should not batch together.
    // - The second should batch with everything else as it's going to create an index.

@@ -98,8 +98,8 @@ impl IndexScheduler {
|
||||
indexes_path: tempdir.path().join("indexes"),
|
||||
snapshots_path: tempdir.path().join("snapshots"),
|
||||
dumps_path: tempdir.path().join("dumps"),
|
||||
webhook_url: None,
|
||||
webhook_authorization_header: None,
|
||||
cli_webhook_url: None,
|
||||
cli_webhook_authorization: None,
|
||||
task_db_size: 1000 * 1000 * 10, // 10 MB, we don't use MiB on purpose.
|
||||
index_base_map_size: 1000 * 1000, // 1 MB, we don't use MiB on purpose.
|
||||
enable_mdb_writemap: false,
|
||||
|
||||
@@ -39,6 +39,7 @@ pub fn upgrade_index_scheduler(
|
||||
(1, 13, _) => 0,
|
||||
(1, 14, _) => 0,
|
||||
(1, 15, _) => 0,
|
||||
(1, 16, _) => 0,
|
||||
(major, minor, patch) => {
|
||||
if major > current_major
|
||||
|| (major == current_major && minor > current_minor)
|
||||
|
||||
@@ -137,6 +137,14 @@ impl HeedAuthStore {
|
||||
Action::ChatsSettingsAll => {
|
||||
actions.extend([Action::ChatsSettingsGet, Action::ChatsSettingsUpdate]);
|
||||
}
|
||||
Action::WebhooksAll => {
|
||||
actions.extend([
|
||||
Action::WebhooksGet,
|
||||
Action::WebhooksUpdate,
|
||||
Action::WebhooksDelete,
|
||||
Action::WebhooksCreate,
|
||||
]);
|
||||
}
|
||||
other => {
|
||||
actions.insert(*other);
|
||||
}
|
||||
|
||||
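Note: the `Action::WebhooksAll` arm above is what lets a key created with `webhooks.*` pass the policy check for each concrete webhook action. A minimal standalone sketch of that expansion, with a simplified `Action` enum standing in for the real meilisearch_types type:

// Standalone sketch of wildcard-action expansion; `Action` here is a
// reduced stand-in, not the real meilisearch_types::keys::Action.
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Action {
    WebhooksGet,
    WebhooksUpdate,
    WebhooksDelete,
    WebhooksCreate,
    WebhooksAll,
}

fn expand(action: Action, actions: &mut HashSet<Action>) {
    match action {
        // The wildcard is replaced by every concrete webhook action.
        Action::WebhooksAll => {
            actions.extend([
                Action::WebhooksGet,
                Action::WebhooksUpdate,
                Action::WebhooksDelete,
                Action::WebhooksCreate,
            ]);
        }
        other => {
            actions.insert(other);
        }
    }
}

fn main() {
    let mut actions = HashSet::new();
    expand(Action::WebhooksAll, &mut actions);
    assert!(actions.contains(&Action::WebhooksGet));
    println!("{actions:?}");
}
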
@@ -418,7 +418,16 @@ InvalidChatCompletionSearchDescriptionPrompt , InvalidRequest , BAD_REQU
InvalidChatCompletionSearchQueryParamPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSearchFilterParamPrompt , InvalidRequest , BAD_REQUEST ;
InvalidChatCompletionSearchIndexUidParamPrompt , InvalidRequest , BAD_REQUEST ;
-InvalidChatCompletionPreQueryPrompt , InvalidRequest , BAD_REQUEST
+InvalidChatCompletionPreQueryPrompt , InvalidRequest , BAD_REQUEST ;
+// Webhooks
+InvalidWebhooks , InvalidRequest , BAD_REQUEST ;
+InvalidWebhookUrl , InvalidRequest , BAD_REQUEST ;
+InvalidWebhookHeaders , InvalidRequest , BAD_REQUEST ;
+ImmutableWebhook , InvalidRequest , BAD_REQUEST ;
+InvalidWebhookUuid , InvalidRequest , BAD_REQUEST ;
+WebhookNotFound , InvalidRequest , NOT_FOUND ;
+ImmutableWebhookUuid , InvalidRequest , BAD_REQUEST ;
+ImmutableWebhookIsEditable , InvalidRequest , BAD_REQUEST
}

impl ErrorCode for JoinError {

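Note: the first changed line only gains a trailing `;`; the macro generating this table separates entries with semicolons, so the previously last entry needs one before the webhook codes can be appended (and the new last entry goes without). A rough sketch of what each row encodes, as a hypothetical reduction rather than the actual Meilisearch macro expansion:

// Hypothetical reduction of the error-code table; the real macro also
// generates string names and documentation links for each code.
#[derive(Debug, Clone, Copy)]
enum Code {
    InvalidWebhooks,
    InvalidWebhookUrl,
    InvalidWebhookHeaders,
    ImmutableWebhook,
    InvalidWebhookUuid,
    WebhookNotFound,
}

impl Code {
    // Mirrors the third column above: the HTTP status each code maps to.
    fn http_status(&self) -> u16 {
        match self {
            Code::WebhookNotFound => 404, // NOT_FOUND
            _ => 400,                     // BAD_REQUEST
        }
    }
}

fn main() {
    assert_eq!(Code::WebhookNotFound.http_status(), 404);
    assert_eq!(Code::InvalidWebhookUrl.http_status(), 400);
    println!("{:?} -> {}", Code::ImmutableWebhook, Code::ImmutableWebhook.http_status());
}
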
@@ -365,6 +365,21 @@ pub enum Action {
#[serde(rename = "*.get")]
#[deserr(rename = "*.get")]
AllGet,
+#[serde(rename = "webhooks.get")]
+#[deserr(rename = "webhooks.get")]
+WebhooksGet,
+#[serde(rename = "webhooks.update")]
+#[deserr(rename = "webhooks.update")]
+WebhooksUpdate,
+#[serde(rename = "webhooks.delete")]
+#[deserr(rename = "webhooks.delete")]
+WebhooksDelete,
+#[serde(rename = "webhooks.create")]
+#[deserr(rename = "webhooks.create")]
+WebhooksCreate,
+#[serde(rename = "webhooks.*")]
+#[deserr(rename = "webhooks.*")]
+WebhooksAll,
}

impl Action {
@@ -416,6 +431,11 @@ impl Action {
NETWORK_GET => Some(Self::NetworkGet),
NETWORK_UPDATE => Some(Self::NetworkUpdate),
ALL_GET => Some(Self::AllGet),
+WEBHOOKS_GET => Some(Self::WebhooksGet),
+WEBHOOKS_UPDATE => Some(Self::WebhooksUpdate),
+WEBHOOKS_DELETE => Some(Self::WebhooksDelete),
+WEBHOOKS_CREATE => Some(Self::WebhooksCreate),
+WEBHOOKS_ALL => Some(Self::WebhooksAll),
_otherwise => None,
}
}
@@ -428,7 +448,9 @@ impl Action {
match self {
// Any action that expands to others must return false, as it wouldn't be able to expand recursively.
All | AllGet | DocumentsAll | IndexesAll | ChatsAll | TasksAll | SettingsAll
-| StatsAll | MetricsAll | DumpsAll | SnapshotsAll | ChatsSettingsAll => false,
+| StatsAll | MetricsAll | DumpsAll | SnapshotsAll | ChatsSettingsAll | WebhooksAll => {
+false
+}

Search => true,
DocumentsAdd => false,
@@ -463,6 +485,10 @@ impl Action {
ChatsDelete => false,
ChatsSettingsGet => true,
ChatsSettingsUpdate => false,
+WebhooksGet => true,
+WebhooksUpdate => false,
+WebhooksDelete => false,
+WebhooksCreate => false,
}
}

@@ -522,6 +548,12 @@ pub mod actions {
pub const CHATS_SETTINGS_ALL: u8 = ChatsSettingsAll.repr();
pub const CHATS_SETTINGS_GET: u8 = ChatsSettingsGet.repr();
pub const CHATS_SETTINGS_UPDATE: u8 = ChatsSettingsUpdate.repr();

+pub const WEBHOOKS_GET: u8 = WebhooksGet.repr();
+pub const WEBHOOKS_UPDATE: u8 = WebhooksUpdate.repr();
+pub const WEBHOOKS_DELETE: u8 = WebhooksDelete.repr();
+pub const WEBHOOKS_CREATE: u8 = WebhooksCreate.repr();
+pub const WEBHOOKS_ALL: u8 = WebhooksAll.repr();
}

#[cfg(test)]
@@ -577,6 +609,11 @@ pub(crate) mod test {
assert!(ChatsSettingsGet.repr() == 42 && CHATS_SETTINGS_GET == 42);
assert!(ChatsSettingsUpdate.repr() == 43 && CHATS_SETTINGS_UPDATE == 43);
assert!(AllGet.repr() == 44 && ALL_GET == 44);
+assert!(WebhooksGet.repr() == 45 && WEBHOOKS_GET == 45);
+assert!(WebhooksUpdate.repr() == 46 && WEBHOOKS_UPDATE == 46);
+assert!(WebhooksDelete.repr() == 47 && WEBHOOKS_DELETE == 47);
+assert!(WebhooksCreate.repr() == 48 && WEBHOOKS_CREATE == 48);
+assert!(WebhooksAll.repr() == 49 && WEBHOOKS_ALL == 49);
}

#[test]

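Note: these assertions pin each action's `repr()` to a fixed u8 because API keys persist that number. A round-trip sketch of why reprs can only ever be appended, with simplified stand-ins for the real `Action` and `from_repr`:

// Sketch of the repr-stability contract: stored keys persist the u8, so a
// round-trip through `from_repr` must keep returning the same action.
#[derive(Debug, Clone, Copy, PartialEq)]
#[repr(u8)]
enum Action {
    AllGet = 44,
    WebhooksGet = 45,
    WebhooksUpdate = 46,
    WebhooksDelete = 47,
    WebhooksCreate = 48,
    WebhooksAll = 49,
}

fn from_repr(repr: u8) -> Option<Action> {
    match repr {
        44 => Some(Action::AllGet),
        45 => Some(Action::WebhooksGet),
        46 => Some(Action::WebhooksUpdate),
        47 => Some(Action::WebhooksDelete),
        48 => Some(Action::WebhooksCreate),
        49 => Some(Action::WebhooksAll),
        _ => None,
    }
}

fn main() {
    // New variants must only ever be appended: reusing or shifting a repr
    // would silently change what an existing API key is allowed to do.
    assert_eq!(from_repr(45), Some(Action::WebhooksGet));
    assert_eq!(from_repr(50), None);
}
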
@@ -15,6 +15,7 @@ pub mod star_or;
pub mod task_view;
pub mod tasks;
pub mod versioning;
+pub mod webhooks;
pub use milli::{heed, Index};
use uuid::Uuid;
pub use versioning::VERSION_FILE_NAME;

crates/meilisearch-types/src/webhooks.rs (new file, 28 additions)
@@ -0,0 +1,28 @@
use std::collections::BTreeMap;

use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Webhook {
pub url: String,
#[serde(default)]
pub headers: BTreeMap<String, String>,
}

#[derive(Debug, Serialize, Default, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct WebhooksView {
#[serde(default)]
pub webhooks: BTreeMap<Uuid, Webhook>,
}

// Same as the WebhooksView instead it should never contains the CLI webhooks.
// It's the right structure to use in the dump
#[derive(Debug, Deserialize, Serialize, Default, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct WebhooksDumpView {
#[serde(default)]
pub webhooks: BTreeMap<Uuid, Webhook>,
}
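
A quick serde round-trip of the new types, assuming `serde`, `serde_json` and `uuid` (with its serde feature) are available as in the file above:

use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Webhook {
    pub url: String,
    #[serde(default)]
    pub headers: BTreeMap<String, String>,
}

fn main() {
    // `headers` is #[serde(default)], so it may be omitted entirely.
    let webhook: Webhook =
        serde_json::from_str(r#"{ "url": "https://your.site/on-tasks-completed" }"#).unwrap();
    assert!(webhook.headers.is_empty());

    let mut map = BTreeMap::new();
    map.insert(Uuid::nil(), webhook);
    // A BTreeMap<Uuid, Webhook> serializes as an object keyed by UUID strings,
    // which is the shape WebhooksView/WebhooksDumpView expose.
    println!("{}", serde_json::to_string_pretty(&map).unwrap());
}
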
@@ -170,5 +170,5 @@ german = ["meilisearch-types/german"]
turkish = ["meilisearch-types/turkish"]

[package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.20/build.zip"
-sha1 = "82a7ddd7bf14bb5323c3d235d2b62892a98b6a59"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.22/build.zip"
+sha1 = "b70b2036b5f167da9ea0b637da8b320c7ea88254"

@@ -223,8 +223,8 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
indexes_path: opt.db_path.join("indexes"),
snapshots_path: opt.snapshot_dir.clone(),
dumps_path: opt.dump_dir.clone(),
-webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
-webhook_authorization_header: opt.task_webhook_authorization_header.clone(),
+cli_webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
+cli_webhook_authorization: opt.task_webhook_authorization_header.clone(),
task_db_size: opt.max_task_db_size.as_u64() as usize,
index_base_map_size: opt.max_index_size.as_u64() as usize,
enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
@@ -491,7 +491,12 @@ fn import_dump(
let _ = std::fs::write(db_path.join("instance-uid"), instance_uid.to_string().as_bytes());
};

-// 2. Import the `Key`s.
+// 2. Import the webhooks
+if let Some(webhooks) = dump_reader.webhooks() {
+index_scheduler.update_runtime_webhooks(webhooks.webhooks.clone())?;
+}
+
+// 3. Import the `Key`s.
let mut keys = Vec::new();
auth.raw_delete_all_keys()?;
for key in dump_reader.keys()? {
@@ -500,20 +505,20 @@ fn import_dump(
keys.push(key);
}

-// 3. Import the `ChatCompletionSettings`s.
+// 4. Import the `ChatCompletionSettings`s.
for result in dump_reader.chat_completions_settings()? {
let (name, settings) = result?;
index_scheduler.put_chat_settings(&name, &settings)?;
}

-// 4. Import the runtime features and network
+// 5. Import the runtime features and network
let features = dump_reader.features()?.unwrap_or_default();
index_scheduler.put_runtime_features(features)?;

let network = dump_reader.network()?.cloned().unwrap_or_default();
index_scheduler.put_network(network)?;

-// 4.1 Use all cpus to process dump if `max_indexing_threads` not configured
+// 5.1 Use all cpus to process dump if `max_indexing_threads` not configured
let backup_config;
let base_config = index_scheduler.indexer_config();

@@ -530,7 +535,7 @@ fn import_dump(
// /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
// try to process tasks while we're trying to import the indexes.

-// 5. Import the indexes.
+// 6. Import the indexes.
for index_reader in dump_reader.indexes()? {
let mut index_reader = index_reader?;
let metadata = index_reader.metadata();
@@ -543,12 +548,12 @@ fn import_dump(
let mut wtxn = index.write_txn()?;

let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
-// 5.1 Import the primary key if there is one.
+// 6.1 Import the primary key if there is one.
if let Some(ref primary_key) = metadata.primary_key {
builder.set_primary_key(primary_key.to_string());
}

-// 5.2 Import the settings.
+// 6.2 Import the settings.
tracing::info!("Importing the settings.");
let settings = index_reader.settings()?;
apply_settings_to_builder(&settings, &mut builder);
@@ -560,8 +565,8 @@ fn import_dump(
let rtxn = index.read_txn()?;

if index_scheduler.no_edition_2024_for_dumps() {
-// 5.3 Import the documents.
-// 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
+// 6.3 Import the documents.
+// 6.3.1 We need to recreate the grenad+obkv format accepted by the index.
tracing::info!("Importing the documents.");
let file = tempfile::tempfile()?;
let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@@ -572,7 +577,7 @@ fn import_dump(
// This flush the content of the batch builder.
let file = builder.into_inner()?.into_inner()?;

-// 5.3.2 We feed it to the milli index.
+// 6.3.2 We feed it to the milli index.
let reader = BufReader::new(file);
let reader = DocumentsBatchReader::from_reader(reader)?;

@@ -651,15 +656,15 @@ fn import_dump(
index_scheduler.refresh_index_stats(&uid)?;
}

-// 6. Import the queue
+// 7. Import the queue
let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
-// 6.1. Import the batches
+// 7.1. Import the batches
for ret in dump_reader.batches()? {
let batch = ret?;
index_scheduler_dump.register_dumped_batch(batch)?;
}

-// 6.2. Import the tasks
+// 7.2. Import the tasks
for ret in dump_reader.tasks()? {
let (task, file) = ret?;
index_scheduler_dump.register_dumped_task(task, file)?;

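Note: the renumbering above exists because the webhook import was inserted as step 2, shifting everything after it. A condensed view of the resulting order (step names are illustrative, not the real function):

// Condensed view of the resulting import order; each entry stands in for
// the corresponding block of import_dump above.
fn import_dump_order() -> &'static [&'static str] {
    &[
        "1. instance uid",
        "2. webhooks",
        "3. keys",
        "4. chat completion settings",
        "5. runtime features and network",
        "6. indexes (primary key, settings, documents)",
        // Tasks must come last: importing them earlier would let the
        // scheduler start processing while indexes are still half-imported.
        "7. queue (batches, then tasks)",
    ]
}

fn main() {
    for step in import_dump_order() {
        println!("{step}");
    }
}
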
@@ -15,30 +15,33 @@ lazy_static! {
"Meilisearch number of degraded search requests"
))
.expect("Can't create a metric");
-pub static ref MEILISEARCH_CHAT_SEARCH_REQUESTS: IntCounterVec = register_int_counter_vec!(
+pub static ref MEILISEARCH_CHAT_SEARCHES_TOTAL: IntCounterVec = register_int_counter_vec!(
opts!(
-"meilisearch_chat_search_requests",
-"Meilisearch number of search requests performed by the chat route itself"
+"meilisearch_chat_searches_total",
+"Total number of searches performed by the chat route"
),
&["type"]
)
.expect("Can't create a metric");
-pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
-opts!("meilisearch_chat_prompt_tokens_usage", "Meilisearch Chat Prompt Tokens Usage"),
+pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_TOTAL: IntCounterVec = register_int_counter_vec!(
+opts!("meilisearch_chat_prompt_tokens_total", "Total number of prompt tokens consumed"),
&["workspace", "model"]
)
.expect("Can't create a metric");
-pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE: IntCounterVec =
+pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_TOTAL: IntCounterVec =
register_int_counter_vec!(
opts!(
-"meilisearch_chat_completion_tokens_usage",
-"Meilisearch Chat Completion Tokens Usage"
+"meilisearch_chat_completion_tokens_total",
+"Total number of completion tokens consumed"
),
&["workspace", "model"]
)
.expect("Can't create a metric");
-pub static ref MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
-opts!("meilisearch_chat_total_tokens_usage", "Meilisearch Chat Total Tokens Usage"),
+pub static ref MEILISEARCH_CHAT_TOKENS_TOTAL: IntCounterVec = register_int_counter_vec!(
+opts!(
+"meilisearch_chat_tokens_total",
+"Total number of tokens consumed (prompt + completion)"
+),
&["workspace", "model"]
)
.expect("Can't create a metric");

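Note: the renames follow the Prometheus convention that monotonically increasing counters end in `_total`. A minimal standalone counter with the same label shape, assuming the `prometheus` and `lazy_static` crates used above (the metric name here is illustrative):

use lazy_static::lazy_static;
use prometheus::{opts, register_int_counter_vec, Encoder, IntCounterVec, TextEncoder};

lazy_static! {
    // Same shape as MEILISEARCH_CHAT_TOKENS_TOTAL: one counter per
    // (workspace, model) pair, with a `_total` suffix on the metric name.
    static ref CHAT_TOKENS_TOTAL: IntCounterVec = register_int_counter_vec!(
        opts!("chat_tokens_total", "Total number of tokens consumed"),
        &["workspace", "model"]
    )
    .expect("Can't create a metric");
}

fn main() {
    CHAT_TOKENS_TOTAL.with_label_values(&["default", "gpt-4o-mini"]).inc_by(42);

    // Render the default registry the way a /metrics endpoint would.
    let mut buffer = Vec::new();
    TextEncoder::new()
        .encode(&prometheus::gather(), &mut buffer)
        .expect("Can't encode metrics");
    print!("{}", String::from_utf8(buffer).unwrap());
}
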
@@ -206,11 +206,13 @@ pub struct Opt {
pub env: String,

/// Called whenever a task finishes so a third party can be notified.
+/// See also the dedicated API `/webhooks`.
#[clap(long, env = MEILI_TASK_WEBHOOK_URL)]
pub task_webhook_url: Option<Url>,

/// The Authorization header to send on the webhook URL whenever
/// a task finishes so a third party can be notified.
+/// See also the dedicated API `/webhooks`.
#[clap(long, env = MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER)]
pub task_webhook_authorization_header: Option<String>,

@@ -50,8 +50,8 @@ use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
use crate::metrics::{
-MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE, MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE,
-MEILISEARCH_CHAT_SEARCH_REQUESTS, MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE,
+MEILISEARCH_CHAT_COMPLETION_TOKENS_TOTAL, MEILISEARCH_CHAT_PROMPT_TOKENS_TOTAL,
+MEILISEARCH_CHAT_SEARCHES_TOTAL, MEILISEARCH_CHAT_TOKENS_TOTAL,
MEILISEARCH_DEGRADED_SEARCH_REQUESTS,
};
use crate::routes::chats::utils::SseEventSender;
@@ -319,7 +319,7 @@ async fn process_search_request(
};
let mut documents = Vec::new();
if let Ok((ref rtxn, ref search_result)) = output {
-MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
+MEILISEARCH_CHAT_SEARCHES_TOTAL.with_label_values(&["internal"]).inc();
if search_result.degraded {
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
}
@@ -596,13 +596,13 @@ async fn run_conversation<C: async_openai::config::Config>(
match result {
Ok(resp) => {
if let Some(usage) = resp.usage.as_ref() {
-MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE
+MEILISEARCH_CHAT_PROMPT_TOKENS_TOTAL
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.prompt_tokens as u64);
-MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE
+MEILISEARCH_CHAT_COMPLETION_TOKENS_TOTAL
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.completion_tokens as u64);
-MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE
+MEILISEARCH_CHAT_TOKENS_TOTAL
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.total_tokens as u64);
}

@@ -139,6 +139,8 @@ pub struct DocumentsFetchAggregator<Method: AggregateMethod> {
per_document_id: bool,
// if a filter was used
per_filter: bool,
+with_vector_filter: bool,

// if documents were sorted
sort: bool,

@@ -166,6 +168,7 @@ impl<Method: AggregateMethod> Aggregate for DocumentsFetchAggregator<Method> {
Box::new(Self {
per_document_id: self.per_document_id | new.per_document_id,
per_filter: self.per_filter | new.per_filter,
+with_vector_filter: self.with_vector_filter | new.with_vector_filter,
sort: self.sort | new.sort,
retrieve_vectors: self.retrieve_vectors | new.retrieve_vectors,
max_limit: self.max_limit.max(new.max_limit),
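
Note: the merge above ORs booleans and maxes limits, so the aggregate records whether a feature was used at least once across events and how far it was pushed. A standalone sketch of that rule (struct name and fields are a reduced stand-in):

// Standalone sketch of the merge rule used by DocumentsFetchAggregator.
#[derive(Default, Debug)]
struct FetchAggregate {
    per_filter: bool,
    with_vector_filter: bool,
    sort: bool,
    max_limit: usize,
}

impl FetchAggregate {
    fn aggregate(self, new: Self) -> Self {
        Self {
            // Booleans are OR-ed: "was this used at least once".
            per_filter: self.per_filter | new.per_filter,
            with_vector_filter: self.with_vector_filter | new.with_vector_filter,
            sort: self.sort | new.sort,
            // Limits are max-ed: "how far was it pushed".
            max_limit: self.max_limit.max(new.max_limit),
        }
    }
}

fn main() {
    let a = FetchAggregate { per_filter: true, max_limit: 20, ..Default::default() };
    let b = FetchAggregate { sort: true, max_limit: 100, ..Default::default() };
    let merged = a.aggregate(b);
    assert!(merged.per_filter && merged.sort && !merged.with_vector_filter);
    assert_eq!(merged.max_limit, 100);
}
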
@@ -250,6 +253,7 @@ pub async fn get_document(
retrieve_vectors: param_retrieve_vectors.0,
per_document_id: true,
per_filter: false,
+with_vector_filter: false,
sort: false,
max_limit: 0,
max_offset: 0,
@@ -475,6 +479,10 @@ pub async fn documents_by_query_post(
analytics.publish(
DocumentsFetchAggregator::<DocumentsPOST> {
per_filter: body.filter.is_some(),
+with_vector_filter: body
+.filter
+.as_ref()
+.is_some_and(|f| f.to_string().contains("_vectors")),
sort: body.sort.is_some(),
retrieve_vectors: body.retrieve_vectors,
max_limit: body.limit,
@@ -576,6 +584,10 @@ pub async fn get_documents(
analytics.publish(
DocumentsFetchAggregator::<DocumentsGET> {
per_filter: query.filter.is_some(),
+with_vector_filter: query
+.filter
+.as_ref()
+.is_some_and(|f| f.to_string().contains("_vectors")),
sort: query.sort.is_some(),
retrieve_vectors: query.retrieve_vectors,
max_limit: query.limit,
@@ -1455,8 +1467,6 @@ fn some_documents<'a, 't: 'a>(
document.remove("_vectors");
}
RetrieveVectors::Retrieve => {
-// Clippy is simply wrong
-#[allow(clippy::manual_unwrap_or_default)]
let mut vectors = match document.remove("_vectors") {
Some(Value::Object(map)) => map,
_ => Default::default(),

@@ -40,6 +40,7 @@ pub struct SearchAggregator<Method: AggregateMethod> {
// filter
filter_with_geo_radius: bool,
filter_with_geo_bounding_box: bool,
+filter_on_vectors: bool,
// every time a request has a filter, this field must be incremented by the number of terms it contains
filter_sum_of_criteria_terms: usize,
// every time a request has a filter, this field must be incremented by one
@@ -163,6 +164,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
let stringified_filters = filter.to_string();
ret.filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
ret.filter_with_geo_bounding_box = stringified_filters.contains("_geoBoundingBox(");
+ret.filter_on_vectors = stringified_filters.contains("_vectors");
ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
}

@@ -224,6 +226,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
let SearchResult {
hits: _,
query: _,
+query_vector: _,
processing_time_ms,
hits_info: _,
semantic_hit_count: _,
@@ -260,6 +263,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
distinct,
filter_with_geo_radius,
filter_with_geo_bounding_box,
+filter_on_vectors,
filter_sum_of_criteria_terms,
filter_total_number_of_criteria,
used_syntax,
@@ -314,6 +318,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
// filter
self.filter_with_geo_radius |= filter_with_geo_radius;
self.filter_with_geo_bounding_box |= filter_with_geo_bounding_box;
+self.filter_on_vectors |= filter_on_vectors;
self.filter_sum_of_criteria_terms =
self.filter_sum_of_criteria_terms.saturating_add(filter_sum_of_criteria_terms);
self.filter_total_number_of_criteria =
@@ -388,6 +393,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
distinct,
filter_with_geo_radius,
filter_with_geo_bounding_box,
+filter_on_vectors,
filter_sum_of_criteria_terms,
filter_total_number_of_criteria,
used_syntax,
@@ -445,6 +451,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
"filter": {
"with_geoRadius": filter_with_geo_radius,
"with_geoBoundingBox": filter_with_geo_bounding_box,
+"on_vectors": filter_on_vectors,
"avg_criteria_number": format!("{:.2}", filter_sum_of_criteria_terms as f64 / filter_total_number_of_criteria as f64),
"most_used_syntax": used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
},

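Note: like the geo detections just above it, `filter_on_vectors` is derived by substring-matching the stringified filter, which is cheap but can rarely over-report (for instance a field literally containing "_vectors"). A tiny sketch of the detection:

// Sketch of the substring-based detection used for filter analytics.
fn main() {
    let stringified_filters = "_vectors.default.userProvided = true AND _geoRadius(45, 5, 100)";

    let filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
    let filter_with_geo_bounding_box = stringified_filters.contains("_geoBoundingBox(");
    let filter_on_vectors = stringified_filters.contains("_vectors");

    assert!(filter_with_geo_radius && filter_on_vectors && !filter_with_geo_bounding_box);
}
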
@@ -511,7 +511,7 @@ make_setting_routes!(
},
{
route: "/chat",
-update_verb: put,
+update_verb: patch,
value_type: ChatSettings,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsIndexChat,

@@ -41,6 +41,7 @@ use crate::routes::indexes::IndexView;
use crate::routes::multi_search::SearchResults;
use crate::routes::network::{Network, Remote};
use crate::routes::swap_indexes::SwapIndexesPayload;
+use crate::routes::webhooks::{WebhookResults, WebhookSettings, WebhookWithMetadata};
use crate::search::{
FederatedSearch, FederatedSearchResult, Federation, FederationOptions, MergeFacets,
SearchQueryWithIndex, SearchResultWithIndex, SimilarQuery, SimilarResult,
@@ -70,6 +71,7 @@ mod swap_indexes;
pub mod tasks;
#[cfg(test)]
mod tasks_test;
+mod webhooks;

#[derive(OpenApi)]
#[openapi(
@@ -89,6 +91,7 @@ mod tasks_test;
(path = "/experimental-features", api = features::ExperimentalFeaturesApi),
(path = "/export", api = export::ExportApi),
(path = "/network", api = network::NetworkApi),
+(path = "/webhooks", api = webhooks::WebhooksApi),
),
paths(get_health, get_version, get_stats),
tags(
@@ -99,7 +102,7 @@ mod tasks_test;
url = "/",
description = "Local server",
)),
-components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export))
+components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export, WebhookSettings, WebhookResults, WebhookWithMetadata))
)]
pub struct MeilisearchApi;

@@ -120,7 +123,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/experimental-features").configure(features::configure))
.service(web::scope("/network").configure(network::configure))
.service(web::scope("/export").configure(export::configure))
-.service(web::scope("/chats").configure(chats::configure));
+.service(web::scope("/chats").configure(chats::configure))
+.service(web::scope("/webhooks").configure(webhooks::configure));

#[cfg(feature = "swagger")]
{

@@ -51,7 +51,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
get,
path = "",
tag = "Network",
-security(("Bearer" = ["network.get", "network.*", "*"])),
+security(("Bearer" = ["network.get", "*"])),
responses(
(status = OK, description = "Known nodes are returned", body = Network, content_type = "application/json", example = json!(
{
@@ -168,7 +168,7 @@ impl Aggregate for PatchNetworkAnalytics {
path = "",
tag = "Network",
request_body = Network,
-security(("Bearer" = ["network.update", "network.*", "*"])),
+security(("Bearer" = ["network.update", "*"])),
responses(
(status = OK, description = "New network state is returned", body = Network, content_type = "application/json", example = json!(
{

@@ -336,7 +336,7 @@ impl<Method: AggregateMethod + 'static> Aggregate for TaskFilterAnalytics<Method
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
-(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
+(status = 404, description = "The task uid does not exist", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",
@@ -430,7 +430,7 @@ async fn cancel_tasks(
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
-(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
+(status = 404, description = "The task uid does not exist", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",
@@ -611,7 +611,7 @@ async fn get_tasks(
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
-(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
+(status = 404, description = "The task uid does not exist", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",
@@ -665,7 +665,7 @@ async fn get_task(
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
-(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
+(status = 404, description = "The task uid does not exist", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",

crates/meilisearch/src/routes/webhooks.rs (new file, 474 additions)
@@ -0,0 +1,474 @@
use std::collections::BTreeMap;
use std::str::FromStr;

use actix_http::header::{
HeaderName, HeaderValue, InvalidHeaderName as ActixInvalidHeaderName,
InvalidHeaderValue as ActixInvalidHeaderValue,
};
use actix_web::web::{self, Data, Path};
use actix_web::{HttpRequest, HttpResponse};
use core::convert::Infallible;
use deserr::actix_web::AwebJson;
use deserr::{DeserializeError, Deserr, ValuePointerRef};
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError};
use meilisearch_types::error::deserr_codes::{
BadRequest, InvalidWebhookHeaders, InvalidWebhookUrl,
};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::webhooks::Webhook;
use serde::Serialize;
use tracing::debug;
use url::Url;
use utoipa::{OpenApi, ToSchema};
use uuid::Uuid;

use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use WebhooksError::*;

#[derive(OpenApi)]
#[openapi(
paths(get_webhooks, get_webhook, post_webhook, patch_webhook, delete_webhook),
tags((
name = "Webhooks",
description = "The `/webhooks` route allows you to register endpoints to be called once tasks are processed.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/webhooks"),
)),
)]
pub struct WebhooksApi;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
.route(web::get().to(get_webhooks))
.route(web::post().to(SeqHandler(post_webhook))),
)
.service(
web::resource("/{uuid}")
.route(web::get().to(get_webhook))
.route(web::patch().to(SeqHandler(patch_webhook)))
.route(web::delete().to(SeqHandler(delete_webhook))),
);
}

#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_webhook)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub(super) struct WebhookSettings {
#[schema(value_type = Option<String>, example = "https://your.site/on-tasks-completed")]
#[deserr(default, error = DeserrJsonError<InvalidWebhookUrl>)]
#[serde(default)]
url: Setting<String>,
#[schema(value_type = Option<BTreeMap<String, String>>, example = json!({"Authorization":"Bearer a-secret-token"}))]
#[deserr(default, error = DeserrJsonError<InvalidWebhookHeaders>)]
#[serde(default)]
headers: Setting<BTreeMap<String, Setting<String>>>,
}

fn deny_immutable_fields_webhook(
field: &str,
accepted: &[&str],
location: ValuePointerRef,
) -> DeserrJsonError {
match field {
"uuid" => immutable_field_error(field, accepted, Code::ImmutableWebhookUuid),
"isEditable" => immutable_field_error(field, accepted, Code::ImmutableWebhookIsEditable),
_ => deserr::take_cf_content(DeserrJsonError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
)),
}
}

#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub(super) struct WebhookWithMetadata {
uuid: Uuid,
is_editable: bool,
#[schema(value_type = WebhookSettings)]
#[serde(flatten)]
webhook: Webhook,
}

impl WebhookWithMetadata {
pub fn from(uuid: Uuid, webhook: Webhook) -> Self {
Self { uuid, is_editable: uuid != Uuid::nil(), webhook }
}
}

#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub(super) struct WebhookResults {
results: Vec<WebhookWithMetadata>,
}

#[utoipa::path(
get,
path = "",
tag = "Webhooks",
security(("Bearer" = ["webhooks.get", "webhooks.*", "*.get", "*"])),
responses(
(status = OK, description = "Webhooks are returned", body = WebhookResults, content_type = "application/json", example = json!({
"results": [
{
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret-token"
},
"isEditable": true
},
{
"uuid": "550e8400-e29b-41d4-a716-446655440001",
"url": "https://another.site/on-tasks-completed",
"isEditable": true
}
]
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_webhooks(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_GET }>, Data<IndexScheduler>>,
) -> Result<HttpResponse, ResponseError> {
let webhooks = index_scheduler.webhooks_view();
let results = webhooks
.webhooks
.into_iter()
.map(|(uuid, webhook)| WebhookWithMetadata::from(uuid, webhook))
.collect::<Vec<_>>();
let results = WebhookResults { results };

debug!(returns = ?results, "Get webhooks");
Ok(HttpResponse::Ok().json(results))
}

#[derive(Serialize, Default)]
pub struct PatchWebhooksAnalytics;

impl Aggregate for PatchWebhooksAnalytics {
fn event_name(&self) -> &'static str {
"Webhooks Updated"
}

fn aggregate(self: Box<Self>, _new: Box<Self>) -> Box<Self> {
self
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

#[derive(Serialize, Default)]
pub struct PostWebhooksAnalytics;

impl Aggregate for PostWebhooksAnalytics {
fn event_name(&self) -> &'static str {
"Webhooks Created"
}

fn aggregate(self: Box<Self>, _new: Box<Self>) -> Box<Self> {
self
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

#[derive(Debug, thiserror::Error)]
enum WebhooksError {
#[error("The URL for the webhook `{0}` is missing.")]
MissingUrl(Uuid),
#[error("Defining too many webhooks would crush the server. Please limit the number of webhooks to 20. You may use a third-party proxy server to dispatch events to more than 20 endpoints.")]
TooManyWebhooks,
#[error("Too many headers for the webhook `{0}`. Please limit the number of headers to 200. Hint: To remove an already defined header set its value to `null`")]
TooManyHeaders(Uuid),
#[error("Webhook `{0}` is immutable. The webhook defined from the command line cannot be modified using the API.")]
ImmutableWebhook(Uuid),
#[error("Webhook `{0}` not found.")]
WebhookNotFound(Uuid),
#[error("Invalid header name `{0}`: {1}")]
InvalidHeaderName(String, ActixInvalidHeaderName),
#[error("Invalid header value `{0}`: {1}")]
InvalidHeaderValue(String, ActixInvalidHeaderValue),
#[error("Invalid URL `{0}`: {1}")]
InvalidUrl(String, url::ParseError),
#[error("Invalid UUID: {0}")]
InvalidUuid(uuid::Error),
}

impl ErrorCode for WebhooksError {
fn error_code(&self) -> meilisearch_types::error::Code {
match self {
MissingUrl(_) => meilisearch_types::error::Code::InvalidWebhookUrl,
TooManyWebhooks => meilisearch_types::error::Code::InvalidWebhooks,
TooManyHeaders(_) => meilisearch_types::error::Code::InvalidWebhookHeaders,
ImmutableWebhook(_) => meilisearch_types::error::Code::ImmutableWebhook,
WebhookNotFound(_) => meilisearch_types::error::Code::WebhookNotFound,
InvalidHeaderName(_, _) => meilisearch_types::error::Code::InvalidWebhookHeaders,
InvalidHeaderValue(_, _) => meilisearch_types::error::Code::InvalidWebhookHeaders,
InvalidUrl(_, _) => meilisearch_types::error::Code::InvalidWebhookUrl,
InvalidUuid(_) => meilisearch_types::error::Code::InvalidWebhookUuid,
}
}
}

fn patch_webhook_inner(
uuid: &Uuid,
old_webhook: Webhook,
new_webhook: WebhookSettings,
) -> Result<Webhook, WebhooksError> {
let Webhook { url: old_url, mut headers } = old_webhook;

let url = match new_webhook.url {
Setting::Set(url) => url,
Setting::NotSet => old_url,
Setting::Reset => return Err(MissingUrl(uuid.to_owned())),
};

match new_webhook.headers {
Setting::Set(new_headers) => {
for (name, value) in new_headers {
match value {
Setting::Set(value) => {
headers.insert(name, value);
}
Setting::NotSet => continue,
Setting::Reset => {
headers.remove(&name);
continue;
}
}
}
}
Setting::Reset => headers.clear(),
Setting::NotSet => (),
};

if headers.len() > 200 {
return Err(TooManyHeaders(uuid.to_owned()));
}

Ok(Webhook { url, headers })
}

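Note: the three `Setting` states drive the merge above: `Set` overwrites a header, `NotSet` keeps the old value, and `Reset` (a JSON `null`) removes it. A reduced sketch of the header merge with a local `Setting` enum standing in for milli's:

use std::collections::BTreeMap;

// Local stand-in for meilisearch_types::milli::update::Setting.
enum Setting<T> {
    Set(T),
    Reset,  // JSON null: remove the header
    NotSet, // field absent: keep the old value
}

fn merge_headers(
    mut headers: BTreeMap<String, String>,
    patch: BTreeMap<String, Setting<String>>,
) -> BTreeMap<String, String> {
    for (name, value) in patch {
        match value {
            Setting::Set(value) => {
                headers.insert(name, value);
            }
            Setting::NotSet => continue,
            Setting::Reset => {
                headers.remove(&name);
            }
        }
    }
    headers
}

fn main() {
    let mut old = BTreeMap::new();
    old.insert("Authorization".to_string(), "Bearer old".to_string());
    old.insert("X-Extra".to_string(), "1".to_string());

    let mut patch = BTreeMap::new();
    patch.insert("Authorization".to_string(), Setting::Set("Bearer new".to_string()));
    patch.insert("X-Extra".to_string(), Setting::Reset);

    let merged = merge_headers(old, patch);
    assert_eq!(merged.get("Authorization").map(String::as_str), Some("Bearer new"));
    assert!(!merged.contains_key("X-Extra"));
}
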
fn check_changed(uuid: Uuid, webhook: &Webhook) -> Result<(), WebhooksError> {
if uuid.is_nil() {
return Err(ImmutableWebhook(uuid));
}

if webhook.url.is_empty() {
return Err(MissingUrl(uuid));
}

if webhook.headers.len() > 200 {
return Err(TooManyHeaders(uuid));
}

for (header, value) in &webhook.headers {
HeaderName::from_bytes(header.as_bytes())
.map_err(|e| InvalidHeaderName(header.to_owned(), e))?;
HeaderValue::from_str(value).map_err(|e| InvalidHeaderValue(header.to_owned(), e))?;
}

if let Err(e) = Url::parse(&webhook.url) {
return Err(InvalidUrl(webhook.url.to_owned(), e));
}

Ok(())
}

#[utoipa::path(
get,
path = "/{uuid}",
tag = "Webhooks",
security(("Bearer" = ["webhooks.get", "webhooks.*", "*.get", "*"])),
responses(
(status = 200, description = "Webhook found", body = WebhookWithMetadata, content_type = "application/json", example = json!({
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret"
},
"isEditable": true
})),
(status = 404, description = "Webhook not found", body = ResponseError, content_type = "application/json"),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
),
params(
("uuid" = Uuid, Path, description = "The universally unique identifier of the webhook")
)
)]
async fn get_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_GET }>, Data<IndexScheduler>>,
uuid: Path<String>,
) -> Result<HttpResponse, ResponseError> {
let uuid = Uuid::from_str(&uuid.into_inner()).map_err(InvalidUuid)?;
let mut webhooks = index_scheduler.webhooks_view();

let webhook = webhooks.webhooks.remove(&uuid).ok_or(WebhookNotFound(uuid))?;
let webhook = WebhookWithMetadata::from(uuid, webhook);

debug!(returns = ?webhook, "Get webhook");
Ok(HttpResponse::Ok().json(webhook))
}

#[utoipa::path(
post,
path = "",
tag = "Webhooks",
request_body = WebhookSettings,
security(("Bearer" = ["webhooks.create", "webhooks.*", "*"])),
responses(
(status = 201, description = "Webhook created successfully", body = WebhookWithMetadata, content_type = "application/json", example = json!({
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret-token"
},
"isEditable": true
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
(status = 400, description = "Bad request", body = ResponseError, content_type = "application/json"),
)
)]
async fn post_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_CREATE }>, Data<IndexScheduler>>,
webhook_settings: AwebJson<WebhookSettings, DeserrJsonError>,
req: HttpRequest,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let webhook_settings = webhook_settings.into_inner();
debug!(parameters = ?webhook_settings, "Post webhook");

let uuid = Uuid::new_v4();
if webhook_settings.headers.as_ref().set().is_some_and(|h| h.len() > 200) {
return Err(TooManyHeaders(uuid).into());
}

let mut webhooks = index_scheduler.retrieve_runtime_webhooks();
if webhooks.len() >= 20 {
return Err(TooManyWebhooks.into());
}

let webhook = Webhook {
url: webhook_settings.url.set().ok_or(MissingUrl(uuid))?,
headers: webhook_settings
.headers
.set()
.map(|h| h.into_iter().map(|(k, v)| (k, v.set().unwrap_or_default())).collect())
.unwrap_or_default(),
};

check_changed(uuid, &webhook)?;
webhooks.insert(uuid, webhook.clone());
index_scheduler.update_runtime_webhooks(webhooks)?;

analytics.publish(PostWebhooksAnalytics, &req);

let response = WebhookWithMetadata::from(uuid, webhook);
debug!(returns = ?response, "Post webhook");
Ok(HttpResponse::Created().json(response))
}

#[utoipa::path(
patch,
path = "/{uuid}",
tag = "Webhooks",
request_body = WebhookSettings,
security(("Bearer" = ["webhooks.update", "webhooks.*", "*"])),
responses(
(status = 200, description = "Webhook updated successfully", body = WebhookWithMetadata, content_type = "application/json", example = json!({
"uuid": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://your.site/on-tasks-completed",
"headers": {
"Authorization": "Bearer a-secret-token"
},
"isEditable": true
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
(status = 400, description = "Bad request", body = ResponseError, content_type = "application/json"),
),
params(
("uuid" = Uuid, Path, description = "The universally unique identifier of the webhook")
)
)]
async fn patch_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_UPDATE }>, Data<IndexScheduler>>,
uuid: Path<String>,
webhook_settings: AwebJson<WebhookSettings, DeserrJsonError>,
req: HttpRequest,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let uuid = Uuid::from_str(&uuid.into_inner()).map_err(InvalidUuid)?;
let webhook_settings = webhook_settings.into_inner();
debug!(parameters = ?(uuid, &webhook_settings), "Patch webhook");

if uuid.is_nil() {
return Err(ImmutableWebhook(uuid).into());
}

let mut webhooks = index_scheduler.retrieve_runtime_webhooks();
let old_webhook = webhooks.remove(&uuid).ok_or(WebhookNotFound(uuid))?;
let webhook = patch_webhook_inner(&uuid, old_webhook, webhook_settings)?;

check_changed(uuid, &webhook)?;
webhooks.insert(uuid, webhook.clone());
index_scheduler.update_runtime_webhooks(webhooks)?;

analytics.publish(PatchWebhooksAnalytics, &req);

let response = WebhookWithMetadata::from(uuid, webhook);
debug!(returns = ?response, "Patch webhook");
Ok(HttpResponse::Ok().json(response))
}

#[utoipa::path(
delete,
path = "/{uuid}",
tag = "Webhooks",
security(("Bearer" = ["webhooks.delete", "webhooks.*", "*"])),
responses(
(status = 204, description = "Webhook deleted successfully"),
(status = 404, description = "Webhook not found", body = ResponseError, content_type = "application/json"),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json"),
),
params(
("uuid" = Uuid, Path, description = "The universally unique identifier of the webhook")
)
)]
async fn delete_webhook(
index_scheduler: GuardedData<ActionPolicy<{ actions::WEBHOOKS_DELETE }>, Data<IndexScheduler>>,
uuid: Path<String>,
) -> Result<HttpResponse, ResponseError> {
let uuid = Uuid::from_str(&uuid.into_inner()).map_err(InvalidUuid)?;
debug!(parameters = ?uuid, "Delete webhook");

if uuid.is_nil() {
return Err(ImmutableWebhook(uuid).into());
}

let mut webhooks = index_scheduler.retrieve_runtime_webhooks();
webhooks.remove(&uuid).ok_or(WebhookNotFound(uuid))?;
index_scheduler.update_runtime_webhooks(webhooks)?;

debug!(returns = "No Content", "Delete webhook");
Ok(HttpResponse::NoContent().finish())
}
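
Taken together, the handlers above form the usual CRUD surface. A client-side sketch of one create/patch/delete cycle, assuming a local instance on the default port with a master key, plus the `reqwest` (blocking and json features) and `serde_json` crates; none of these client-side names come from the diff itself:

use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::blocking::Client::new();
    let base = "http://localhost:7700"; // assumption: default local instance
    let key = "MASTER_KEY"; // assumption: any key carrying webhooks.* works too

    // POST /webhooks creates a webhook and answers 201 with its uuid.
    let created: serde_json::Value = client
        .post(format!("{base}/webhooks"))
        .bearer_auth(key)
        .json(&json!({
            "url": "https://your.site/on-tasks-completed",
            "headers": { "Authorization": "Bearer a-secret-token" }
        }))
        .send()?
        .error_for_status()?
        .json()?;
    let uuid = created["uuid"].as_str().unwrap().to_string();

    // PATCH /webhooks/{uuid}: setting a header to null removes it (Setting::Reset).
    client
        .patch(format!("{base}/webhooks/{uuid}"))
        .bearer_auth(key)
        .json(&json!({ "headers": { "Authorization": null } }))
        .send()?
        .error_for_status()?;

    // DELETE /webhooks/{uuid} answers 204 No Content.
    client
        .delete(format!("{base}/webhooks/{uuid}"))
        .bearer_auth(key)
        .send()?
        .error_for_status()?;
    Ok(())
}
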
@@ -13,6 +13,7 @@ use meilisearch_types::error::ResponseError;
use meilisearch_types::features::{Network, Remote};
use meilisearch_types::milli::order_by_map::OrderByMap;
use meilisearch_types::milli::score_details::{ScoreDetails, WeightedScoreValue};
+use meilisearch_types::milli::vector::Embedding;
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
use roaring::RoaringBitmap;
use tokio::task::JoinHandle;
@@ -46,6 +47,7 @@ pub async fn perform_federated_search(
let deadline = before_search + std::time::Duration::from_secs(9);

let required_hit_count = federation.limit + federation.offset;
+let retrieve_vectors = queries.iter().any(|q| q.retrieve_vectors);

let network = index_scheduler.network();

@@ -91,6 +93,7 @@ pub async fn perform_federated_search(
federation,
mut semantic_hit_count,
mut results_by_index,
+mut query_vectors,
previous_query_data: _,
facet_order,
} = search_by_index;
@@ -122,7 +125,26 @@ pub async fn perform_federated_search(
.map(|hit| hit.hit())
.collect();

-// 3.3. merge facets
+// 3.3. merge query vectors
+let query_vectors = if retrieve_vectors {
+for remote_results in remote_results.iter_mut() {
+if let Some(remote_vectors) = remote_results.query_vectors.take() {
+for (key, value) in remote_vectors.into_iter() {
+debug_assert!(
+!query_vectors.contains_key(&key),
+"Query vector for query {key} already exists"
+);
+query_vectors.insert(key, value);
+}
+}
+}
+
+Some(query_vectors)
+} else {
+None
+};
+
+// 3.4. merge facets
let (facet_distribution, facet_stats, facets_by_index) =
facet_order.merge(federation.merge_facets, remote_results, facets);

@@ -140,6 +162,7 @@ pub async fn perform_federated_search(
offset: federation.offset,
estimated_total_hits,
},
+query_vectors,
semantic_hit_count,
degraded,
used_negative_operator,
@@ -408,6 +431,7 @@ fn merge_metadata(
hits: _,
processing_time_ms,
hits_info,
+query_vectors: _,
semantic_hit_count: _,
facet_distribution: _,
facet_stats: _,
@@ -657,6 +681,7 @@ struct SearchByIndex {
// Then when merging, we'll update its value if there is any semantic hit
semantic_hit_count: Option<u32>,
results_by_index: Vec<SearchResultByIndex>,
+query_vectors: BTreeMap<usize, Embedding>,
previous_query_data: Option<(RankingRules, usize, String)>,
// remember the order and name of first index for each facet when merging with index settings
// to detect if the order is inconsistent for a facet.
@@ -674,6 +699,7 @@ impl SearchByIndex {
federation,
semantic_hit_count: None,
results_by_index: Vec::with_capacity(index_count),
+query_vectors: BTreeMap::new(),
previous_query_data: None,
}
}
@@ -837,8 +863,19 @@ impl SearchByIndex {
document_scores,
degraded: query_degraded,
used_negative_operator: query_used_negative_operator,
+query_vector,
} = result;

+if query.retrieve_vectors {
+if let Some(query_vector) = query_vector {
+debug_assert!(
+!self.query_vectors.contains_key(&query_index),
+"Query vector for query {query_index} already exists"
+);
+self.query_vectors.insert(query_index, query_vector);
+}
+}
+
candidates |= query_candidates;
degraded |= query_degraded;
used_negative_operator |= query_used_negative_operator;

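Note: both merge sites above guard insertion with a `debug_assert!` because each query index owns at most one embedding; a colliding key would indicate a logic error rather than data to overwrite. A reduced sketch of that merge (using `Vec<f32>` where milli uses `Embedding`):

use std::collections::BTreeMap;

// Reduced sketch of the query-vector merges above.
fn merge(
    query_vectors: &mut BTreeMap<usize, Vec<f32>>,
    remote: BTreeMap<usize, Vec<f32>>,
) {
    for (key, value) in remote {
        // Fires only in debug builds; release builds keep the insert cheap.
        debug_assert!(
            !query_vectors.contains_key(&key),
            "Query vector for query {key} already exists"
        );
        query_vectors.insert(key, value);
    }
}

fn main() {
    let mut local = BTreeMap::from([(0, vec![0.1, 0.2])]);
    merge(&mut local, BTreeMap::from([(1, vec![0.3, 0.4])]));
    assert_eq!(local.len(), 2);
}
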
@@ -18,6 +18,7 @@ use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
+use crate::milli::vector::Embedding;

pub const DEFAULT_FEDERATED_WEIGHT: f64 = 1.0;

@@ -117,6 +118,9 @@ pub struct FederatedSearchResult {
#[serde(flatten)]
pub hits_info: HitsInfo,

+#[serde(default, skip_serializing_if = "Option::is_none")]
+pub query_vectors: Option<BTreeMap<usize, Embedding>>,
+
#[serde(default, skip_serializing_if = "Option::is_none")]
pub semantic_hit_count: Option<u32>,

@@ -144,6 +148,7 @@ impl fmt::Debug for FederatedSearchResult {
hits,
processing_time_ms,
hits_info,
+query_vectors,
semantic_hit_count,
degraded,
used_negative_operator,
@@ -158,6 +163,10 @@ impl fmt::Debug for FederatedSearchResult {
debug.field("processing_time_ms", &processing_time_ms);
debug.field("hits", &format!("[{} hits returned]", hits.len()));
debug.field("hits_info", &hits_info);
+if let Some(query_vectors) = query_vectors {
+let known = query_vectors.len();
+debug.field("query_vectors", &format!("[{known} known vectors]"));
+}
if *used_negative_operator {
debug.field("used_negative_operator", used_negative_operator);
}

@@ -841,6 +841,8 @@ pub struct SearchHit {
pub struct SearchResult {
pub hits: Vec<SearchHit>,
pub query: String,
+#[serde(skip_serializing_if = "Option::is_none")]
+pub query_vector: Option<Vec<f32>>,
pub processing_time_ms: u128,
#[serde(flatten)]
pub hits_info: HitsInfo,
@@ -865,6 +867,7 @@ impl fmt::Debug for SearchResult {
let SearchResult {
hits,
query,
+query_vector,
processing_time_ms,
hits_info,
facet_distribution,
@@ -879,6 +882,9 @@ impl fmt::Debug for SearchResult {
debug.field("processing_time_ms", &processing_time_ms);
debug.field("hits", &format!("[{} hits returned]", hits.len()));
debug.field("query", &query);
+if query_vector.is_some() {
+debug.field("query_vector", &"[...]");
+}
debug.field("hits_info", &hits_info);
if *used_negative_operator {
debug.field("used_negative_operator", used_negative_operator);
@@ -1050,6 +1056,7 @@ pub fn prepare_search<'t>(
.map(|x| x as usize)
.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);

+search.retrieve_vectors(query.retrieve_vectors);
search.exhaustive_number_hits(is_finite_pagination);
search.max_total_hits(Some(max_total_hits));
search.scoring_strategy(
@@ -1132,6 +1139,7 @@ pub fn perform_search(
document_scores,
degraded,
used_negative_operator,
+query_vector,
},
semantic_hit_count,
) = search_from_kind(index_uid, search_kind, search)?;
@@ -1222,6 +1230,7 @@ pub fn perform_search(
hits: documents,
hits_info,
query: q.unwrap_or_default(),
+query_vector,
processing_time_ms: before_search.elapsed().as_millis(),
facet_distribution,
facet_stats,
@@ -1734,6 +1743,7 @@ pub fn perform_similar(
document_scores,
degraded: _,
used_negative_operator: _,
+query_vector: _,
} = similar.execute().map_err(|err| match err {
milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
ResponseError::from_msg(err.to_string(), Code::InvalidSimilarFilter)
@@ -2081,7 +2091,7 @@ pub(crate) fn parse_filter(
})?;

if let Some(ref filter) = filter {
-// If the contains operator is used while the contains filter features is not enabled, errors out
+// If the contains operator is used while the contains filter feature is not enabled, errors out
if let Some((token, error)) =
filter.use_contains_operator().zip(features.check_contains_filter().err())
{
@@ -2092,6 +2102,18 @@ pub(crate) fn parse_filter(
}
}

+if let Some(ref filter) = filter {
+// If a vector filter is used while the multi modal feature is not enabled, errors out
+if let Some((token, error)) =
+filter.use_vector_filter().zip(features.check_multimodal("using a vector filter").err())
+{
+return Err(ResponseError::from_msg(
+token.as_external_error(error).to_string(),
+Code::FeatureNotEnabled,
+));
+}
+}
+
Ok(filter)
}

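Note: the new block mirrors the contains-operator gate just above it: `Option::zip` pairs the feature usage with the feature-check error, so the branch fires only when the filter uses the feature and the feature is disabled. A standalone sketch of that gating pattern (names are illustrative):

// Sketch of the gating pattern used twice in parse_filter:
// `usage.zip(check.err())` is Some only when the feature is used AND the
// feature check failed, which is exactly the case that must become an error.
fn gate(uses_feature: Option<&str>, feature_enabled: bool) -> Result<(), String> {
    let check: Result<(), String> = if feature_enabled {
        Ok(())
    } else {
        Err("feature not enabled".to_string())
    };
    if let Some((token, error)) = uses_feature.zip(check.err()) {
        return Err(format!("{token}: {error}"));
    }
    Ok(())
}

fn main() {
    assert!(gate(Some("_vectors"), false).is_err()); // used while disabled: error
    assert!(gate(Some("_vectors"), true).is_ok());   // used while enabled: fine
    assert!(gate(None, false).is_ok());              // not used at all: fine
}
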
@@ -421,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r#"
     {
-      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`",
+      "message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -304,7 +304,7 @@ async fn access_authorized_stats_restricted_index() {
     let (response, code) = index.create(Some("product_id")).await;
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();
-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     // create key with access on `products` index only.
     let content = json!({
@@ -344,7 +344,7 @@ async fn access_authorized_stats_no_index_restriction() {
     let (response, code) = index.create(Some("product_id")).await;
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();
-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     // create key with access on all indexes.
     let content = json!({
@@ -384,7 +384,7 @@ async fn list_authorized_indexes_restricted_index() {
     let (response, code) = index.create(Some("product_id")).await;
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();
-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     // create key with access on `products` index only.
     let content = json!({
@@ -425,7 +425,7 @@ async fn list_authorized_indexes_no_index_restriction() {
     let (response, code) = index.create(Some("product_id")).await;
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();
-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     // create key with access on all indexes.
     let content = json!({
@@ -507,10 +507,10 @@ async fn access_authorized_index_patterns() {

     server.use_api_key(MASTER_KEY);

-    // refer to products_1 with modified api key.
+    // refer to products_1 with a modified api key.
     let index_1 = server.index("products_1");

-    index_1.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index_1.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -578,19 +578,19 @@ async fn raise_error_non_authorized_index_patterns() {
     assert_eq!(202, code, "{:?}", &response);
     let task2_id = response["taskUid"].as_u64().unwrap();

-    // Adding document to test index. Should Fail with 403 -- invalid_api_key
+    // Adding a document to test index. Should Fail with 403 -- invalid_api_key
     let (response, code) = test_index.add_documents(documents, None).await;
     assert_eq!(403, code, "{:?}", &response);

     server.use_api_key(MASTER_KEY);

-    // refer to products_1 with modified api key.
+    // refer to products_1 with a modified api key.
     let product_1_index = server.index("products_1");
-    // refer to products_2 with modified api key.
-    let product_2_index = server.index("products_2");
+    // refer to products_2 with a modified api key.
+    // let product_2_index = server.index("products_2");

-    product_1_index.wait_task(task1_id).await;
-    product_2_index.wait_task(task2_id).await;
+    server.wait_task(task1_id).await;
+    server.wait_task(task2_id).await;

     let (response, code) = product_1_index.get_task(task1_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -603,7 +603,7 @@ async fn raise_error_non_authorized_index_patterns() {

 #[actix_rt::test]
 async fn pattern_indexes() {
-    // Create server with master key
+    // Create a server with master key
     let mut server = Server::new_auth().await;
     server.use_admin_key(MASTER_KEY).await;

@@ -650,7 +650,7 @@ async fn list_authorized_tasks_restricted_index() {
     let (response, code) = index.create(Some("product_id")).await;
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();
-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     // create key with access on `products` index only.
     let content = json!({
@@ -690,7 +690,7 @@ async fn list_authorized_tasks_no_index_restriction() {
     let (response, code) = index.create(Some("product_id")).await;
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();
-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     // create key with access on all indexes.
     let content = json!({
@@ -757,7 +757,7 @@ async fn error_creating_index_without_action() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    let response = index.wait_task(task_id).await;
+    let response = server.wait_task(task_id).await;
     assert_eq!(response["status"], "failed");
     assert_eq!(response["error"], expected_error.clone());

@@ -768,7 +768,7 @@ async fn error_creating_index_without_action() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    let response = index.wait_task(task_id).await;
+    let response = server.wait_task(task_id).await;

     assert_eq!(response["status"], "failed");
     assert_eq!(response["error"], expected_error.clone());
@@ -778,7 +778,7 @@ async fn error_creating_index_without_action() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    let response = index.wait_task(task_id).await;
+    let response = server.wait_task(task_id).await;

     assert_eq!(response["status"], "failed");
     assert_eq!(response["error"], expected_error.clone());
@@ -830,7 +830,7 @@ async fn lazy_create_index() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -844,7 +844,7 @@ async fn lazy_create_index() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -856,7 +856,7 @@ async fn lazy_create_index() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -911,7 +911,7 @@ async fn lazy_create_index_from_pattern() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -929,7 +929,7 @@ async fn lazy_create_index_from_pattern() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
@@ -949,7 +949,7 @@ async fn lazy_create_index_from_pattern() {
     assert_eq!(202, code, "{:?}", &response);
     let task_id = response["taskUid"].as_u64().unwrap();

-    index.wait_task(task_id).await;
+    server.wait_task(task_id).await;

     let (response, code) = index.get_task(task_id).await;
     assert_eq!(200, code, "{:?}", &response);
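Every hunk in this file makes the same mechanical swap: tasks are now awaited through the server handle, which owns the `/tasks` route, rather than through an index handle. A suite-style sketch of the resulting convention (uses only helpers shown in these hunks; not compilable on its own):

    // Sketch only: the calling convention after this refactor.
    #[actix_rt::test]
    async fn wait_task_convention_sketch() {
        let server = Server::new_shared();
        let index = server.unique_index();
        let (response, code) = index.create(Some("product_id")).await;
        assert_eq!(202, code, "{:?}", &response);
        let task_id = response["taskUid"].as_u64().unwrap();
        server.wait_task(task_id).await; // was: index.wait_task(task_id).await
    }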
@@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r#"
     {
-      "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`",
+      "message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
       "code": "invalid_api_key_actions",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@@ -100,11 +100,11 @@ macro_rules! compute_authorized_search {
         let index = server.index("sales");
         let documents = DOCUMENTS.clone();
         let (task1,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task1.uid()).await.succeeded();
+        server.wait_task(task1.uid()).await.succeeded();
         let (task2,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["color"]}))
             .await;
-        index.wait_task(task2.uid()).await.succeeded();
+        server.wait_task(task2.uid()).await.succeeded();
         drop(index);

         for key_content in ACCEPTED_KEYS.iter() {
@@ -147,7 +147,7 @@ macro_rules! compute_forbidden_search {
         let index = server.index("sales");
         let documents = DOCUMENTS.clone();
         let (task, _status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

         for key_content in $parent_keys.iter() {
@@ -268,21 +268,21 @@ macro_rules! compute_authorized_single_search {
         let index = server.index("sales");
         let documents = DOCUMENTS.clone();
         let (add_task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(add_task.uid()).await.succeeded();
+        server.wait_task(add_task.uid()).await.succeeded();
         let (update_task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["color"]}))
             .await;
-        index.wait_task(update_task.uid()).await.succeeded();
+        server.wait_task(update_task.uid()).await.succeeded();
         drop(index);

         let index = server.index("products");
         let documents = NESTED_DOCUMENTS.clone();
         let (add_task2,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(add_task2.uid()).await.succeeded();
+        server.wait_task(add_task2.uid()).await.succeeded();
         let (update_task2,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["doggos"]}))
             .await;
-        index.wait_task(update_task2.uid()).await.succeeded();
+        server.wait_task(update_task2.uid()).await.succeeded();
         drop(index);

@@ -339,21 +339,21 @@ macro_rules! compute_authorized_multiple_search {
         let index = server.index("sales");
         let documents = DOCUMENTS.clone();
         let (task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         let (task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["color"]}))
             .await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

         let index = server.index("products");
         let documents = NESTED_DOCUMENTS.clone();
         let (task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         let (task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["doggos"]}))
             .await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

@@ -423,21 +423,21 @@ macro_rules! compute_forbidden_single_search {
         let index = server.index("sales");
         let documents = DOCUMENTS.clone();
         let (task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         let (task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["color"]}))
             .await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

         let index = server.index("products");
         let documents = NESTED_DOCUMENTS.clone();
         let (task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         let (task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["doggos"]}))
             .await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

         assert_eq!($parent_keys.len(), $failed_query_indexes.len(), "keys != query_indexes");
@@ -499,21 +499,21 @@ macro_rules! compute_forbidden_multiple_search {
         let index = server.index("sales");
         let documents = DOCUMENTS.clone();
         let (task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         let (task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["color"]}))
             .await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

         let index = server.index("products");
         let documents = NESTED_DOCUMENTS.clone();
         let (task,_status_code) = index.add_documents(documents, None).await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         let (task,_status_code) = index
             .update_settings(json!({"filterableAttributes": ["doggos"]}))
             .await;
-        index.wait_task(task.uid()).await.succeeded();
+        server.wait_task(task.uid()).await.succeeded();
         drop(index);

         assert_eq!($parent_keys.len(), $failed_query_indexes.len(), "keys != query_indexes");
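The `compute_*` macros above stamp out one setup-plus-assertion block per invocation. A minimal, self-contained `macro_rules!` sketch of that shape (names are illustrative, not the suite's):

    // Like the compute_* macros: capture inputs, expand to a checked block.
    macro_rules! compute_check {
        ($keys:expr, $expected:expr) => {{
            let keys: &[&str] = $keys;
            assert_eq!(keys.len(), $expected, "keys != expected");
        }};
    }

    fn main() {
        compute_check!(&["search", "documents.get"], 2);
    }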
(File diff suppressed because it is too large.)
@@ -1,15 +1,13 @@
 use std::fmt::Write;
 use std::marker::PhantomData;
 use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
-use std::time::Duration;

 use actix_web::http::StatusCode;
-use tokio::time::sleep;
 use urlencoding::encode as urlencode;

 use super::encoder::Encoder;
 use super::service::Service;
-use super::{Owned, Shared, Value};
+use super::{Owned, Server, Shared, Value};
 use crate::json;

 pub struct Index<'a, State = Owned> {
@@ -33,7 +31,7 @@ impl<'a> Index<'a, Owned> {
         Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData }
     }

-    pub async fn load_test_set(&self) -> u64 {
+    pub async fn load_test_set<State>(&self, waiter: &Server<State>) -> u64 {
         let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
         let (response, code) = self
             .service
@@ -44,12 +42,12 @@ impl<'a> Index<'a, Owned> {
             )
             .await;
         assert_eq!(code, 202);
-        let update_id = response["taskUid"].as_i64().unwrap();
-        self.wait_task(update_id as u64).await;
-        update_id as u64
+        let update_id = response["taskUid"].as_u64().unwrap();
+        waiter.wait_task(update_id).await;
+        update_id
     }

-    pub async fn load_test_set_ndjson(&self) -> u64 {
+    pub async fn load_test_set_ndjson<State>(&self, waiter: &Server<State>) -> u64 {
         let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
         let (response, code) = self
             .service
@@ -60,9 +58,9 @@ impl<'a> Index<'a, Owned> {
             )
             .await;
         assert_eq!(code, 202);
-        let update_id = response["taskUid"].as_i64().unwrap();
-        self.wait_task(update_id as u64).await;
-        update_id as u64
+        let update_id = response["taskUid"].as_u64().unwrap();
+        waiter.wait_task(update_id).await;
+        update_id
     }

     pub async fn create(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
@@ -251,6 +249,11 @@ impl<'a> Index<'a, Owned> {
         self.service.put_encoded(url, settings, self.encoder).await
     }

+    pub async fn update_settings_chat(&self, settings: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/chat", urlencode(self.uid.as_ref()));
+        self.service.patch_encoded(url, settings, self.encoder).await
+    }
+
     pub async fn delete_settings(&self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
         self.service.delete(url).await
@@ -267,10 +270,14 @@ impl Index<'_, Shared> {
     /// You cannot modify the content of a shared index, thus the delete_document_by_filter call
     /// must fail. If the task successfully enqueue itself, we'll wait for the task to finishes,
     /// and if it succeed the function will panic.
-    pub async fn delete_document_by_filter_fail(&self, body: Value) -> (Value, StatusCode) {
+    pub async fn delete_document_by_filter_fail<State>(
+        &self,
+        body: Value,
+        waiter: &Server<State>,
+    ) -> (Value, StatusCode) {
         let (mut task, code) = self._delete_document_by_filter(body).await;
         if code.is_success() {
-            task = self.wait_task(task.uid()).await;
+            task = waiter.wait_task(task.uid()).await;
             if task.is_success() {
                 panic!(
                     "`delete_document_by_filter_fail` succeeded: {}",
@@ -281,10 +288,10 @@ impl Index<'_, Shared> {
         (task, code)
     }

-    pub async fn delete_index_fail(&self) -> (Value, StatusCode) {
+    pub async fn delete_index_fail<State>(&self, waiter: &Server<State>) -> (Value, StatusCode) {
         let (mut task, code) = self._delete().await;
         if code.is_success() {
-            task = self.wait_task(task.uid()).await;
+            task = waiter.wait_task(task.uid()).await;
             if task.is_success() {
                 panic!(
                     "`delete_index_fail` succeeded: {}",
@@ -295,10 +302,14 @@ impl Index<'_, Shared> {
         (task, code)
     }

-    pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
+    pub async fn update_index_fail<State>(
+        &self,
+        primary_key: Option<&str>,
+        waiter: &Server<State>,
+    ) -> (Value, StatusCode) {
         let (mut task, code) = self._update(primary_key).await;
         if code.is_success() {
-            task = self.wait_task(task.uid()).await;
+            task = waiter.wait_task(task.uid()).await;
             if task.is_success() {
                 panic!(
                     "`update_index_fail` succeeded: {}",
@@ -364,23 +375,6 @@ impl<State> Index<'_, State> {
         self.service.delete(url).await
     }

-    pub async fn wait_task(&self, update_id: u64) -> Value {
-        // try several times to get status, or panic to not wait forever
-        let url = format!("/tasks/{}", update_id);
-        for _ in 0..100 {
-            let (response, status_code) = self.service.get(&url).await;
-            assert_eq!(200, status_code, "response: {}", response);
-
-            if response["status"] == "succeeded" || response["status"] == "failed" {
-                return response;
-            }
-
-            // wait 0.5 second.
-            sleep(Duration::from_millis(500)).await;
-        }
-        panic!("Timeout waiting for update id");
-    }
-
     pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
         let url = format!("/tasks/{}", update_id);
         self.service.get(url).await
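`Index::wait_task` is deleted here; its bounded polling loop survives as `Server::wait_task` (see the `@@ -409,12` hunk further down). A self-contained sketch of the same retry-with-cap idea, assuming the `tokio` crate with the `rt`, `macros`, and `time` features:

    use std::time::Duration;
    use tokio::time::sleep;

    // Poll a status probe on a fixed cadence; give up after `max_attempts`.
    async fn wait_until<F>(mut probe: F, max_attempts: u32) -> Result<(), &'static str>
    where
        F: FnMut() -> bool,
    {
        for _ in 0..max_attempts {
            if probe() {
                return Ok(());
            }
            sleep(Duration::from_millis(500)).await; // same 0.5s cadence as the helper
        }
        Err("timed out waiting for task")
    }

    #[tokio::main]
    async fn main() {
        let mut calls = 0;
        // Pretend the task reaches a terminal state on the third poll.
        let done = wait_until(|| { calls += 1; calls >= 3 }, 400).await;
        assert!(done.is_ok());
    }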
@@ -3,10 +3,8 @@ pub mod index;
 pub mod server;
 pub mod service;

-use std::{
-    collections::BTreeMap,
-    fmt::{self, Display},
-};
+use std::collections::BTreeMap;
+use std::fmt::{self, Display};

 use actix_http::StatusCode;
 #[allow(unused)]
@@ -17,10 +15,8 @@ use serde::{Deserialize, Serialize};
 #[allow(unused)]
 pub use server::{default_settings, Server};
 use tokio::sync::OnceCell;
-use wiremock::{
-    matchers::{method, path},
-    Mock, MockServer, Request, ResponseTemplate,
-};
+use wiremock::matchers::{method, path};
+use wiremock::{Mock, MockServer, Request, ResponseTemplate};

 use crate::common::index::Index;
@@ -46,6 +42,15 @@ impl Value {
         self["uid"].as_u64().is_some() || self["taskUid"].as_u64().is_some()
     }

+    #[track_caller]
+    pub fn batch_uid(&self) -> u32 {
+        if let Some(batch_uid) = self["batchUid"].as_u64() {
+            batch_uid as u32
+        } else {
+            panic!("Didn't find `batchUid` in: {self}");
+        }
+    }
+
     /// Return `true` if the `status` field is set to `succeeded`.
     /// Panic if the `status` field doesn't exists.
     #[track_caller]
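The new `batch_uid` accessor is plain JSON field access plus a loud panic. A standalone equivalent with `serde_json` (the suite wraps this in its own `Value` type):

    use serde_json::json;

    fn main() {
        let task = json!({ "taskUid": 12, "batchUid": 3 });
        // Same logic as Value::batch_uid: read the field or panic loudly.
        match task["batchUid"].as_u64() {
            Some(batch_uid) => assert_eq!(batch_uid as u32, 3),
            None => panic!("Didn't find `batchUid` in: {task}"),
        }
    }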
@@ -189,7 +194,7 @@ pub async fn shared_empty_index() -> &'static Index<'static, Shared> {
         let server = Server::new_shared();
         let index = server._index("EMPTY_INDEX").to_shared();
         let (response, _code) = index._create(None).await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         index
     })
     .await
@@ -237,13 +242,13 @@ pub async fn shared_index_with_documents() -> &'static Index<'static, Shared> {
         let index = server._index("SHARED_DOCUMENTS").to_shared();
         let documents = DOCUMENTS.clone();
         let (response, _code) = index._add_documents(documents, None).await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         let (response, _code) = index
             ._update_settings(
                 json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
             )
             .await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         index
     }).await
 }
@@ -280,13 +285,13 @@ pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shar
         let index = server._index("SHARED_SCORE_DOCUMENTS").to_shared();
         let documents = SCORE_DOCUMENTS.clone();
         let (response, _code) = index._add_documents(documents, None).await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         let (response, _code) = index
             ._update_settings(
                 json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
             )
             .await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         index
     }).await
 }
@@ -357,13 +362,13 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Sha
         let index = server._index("SHARED_NESTED_DOCUMENTS").to_shared();
         let documents = NESTED_DOCUMENTS.clone();
         let (response, _code) = index._add_documents(documents, None).await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         let (response, _code) = index
             ._update_settings(
                 json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}),
             )
             .await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         index
     }).await
 }
@@ -457,7 +462,7 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
             )
             .await;
         assert_eq!(code, 202);
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         index
     })
     .await
@@ -504,14 +509,14 @@ pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared
         let server = Server::new_shared();
         let index = server._index("SHARED_GEO_DOCUMENTS").to_shared();
         let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();

         let (response, _code) = index
             ._update_settings(
                 json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}),
             )
             .await;
-        index.wait_task(response.uid()).await.succeeded();
+        server.wait_task(response.uid()).await.succeeded();
         index
     })
     .await
@@ -609,7 +614,7 @@ pub async fn init_fragments_index() -> (Server<Owned>, String, crate::common::Va
     let (value, code) = index.add_documents(documents, None).await;
     assert_eq!(code, StatusCode::ACCEPTED);

-    let _task = index.wait_task(value.uid()).await.succeeded();
+    let _task = server.wait_task(value.uid()).await.succeeded();

     let uid = index.uid.clone();
     (server, uid, settings)
@@ -674,7 +679,7 @@ pub async fn init_fragments_index_composite() -> (Server<Owned>, String, crate::
     let (value, code) = index.add_documents(documents, None).await;
     assert_eq!(code, StatusCode::ACCEPTED);

-    index.wait_task(value.uid()).await.succeeded();
+    server.wait_task(value.uid()).await.succeeded();

     let uid = index.uid.clone();
     (server, uid, settings)
@@ -182,6 +182,25 @@ impl Server<Owned> {
         self.service.patch("/network", value).await
     }

+    pub async fn create_webhook(&self, value: Value) -> (Value, StatusCode) {
+        self.service.post("/webhooks", value).await
+    }
+
+    pub async fn get_webhook(&self, uuid: impl AsRef<str>) -> (Value, StatusCode) {
+        let url = format!("/webhooks/{}", uuid.as_ref());
+        self.service.get(url).await
+    }
+
+    pub async fn delete_webhook(&self, uuid: impl AsRef<str>) -> (Value, StatusCode) {
+        let url = format!("/webhooks/{}", uuid.as_ref());
+        self.service.delete(url).await
+    }
+
+    pub async fn patch_webhook(&self, uuid: impl AsRef<str>, value: Value) -> (Value, StatusCode) {
+        let url = format!("/webhooks/{}", uuid.as_ref());
+        self.service.patch(url, value).await
+    }
+
     pub async fn get_metrics(&self) -> (Value, StatusCode) {
         self.service.get("/metrics").await
     }
@@ -409,12 +428,12 @@ impl<State> Server<State> {

     pub async fn wait_task(&self, update_id: u64) -> Value {
         // try several times to get status, or panic to not wait forever
-        let url = format!("/tasks/{}", update_id);
-        let max_attempts = 400; // 200 seconds total, 0.5s per attempt
+        let url = format!("/tasks/{update_id}");
+        let max_attempts = 400; // 200 seconds in total, 0.5secs per attempt

         for i in 0..max_attempts {
-            let (response, status_code) = self.service.get(&url).await;
-            assert_eq!(200, status_code, "response: {}", response);
+            let (response, status_code) = self.service.get(url.clone()).await;
+            assert_eq!(200, status_code, "response: {response}");

             if response["status"] == "succeeded" || response["status"] == "failed" {
                 return response;
@@ -447,6 +466,10 @@ impl<State> Server<State> {
     pub async fn get_network(&self) -> (Value, StatusCode) {
         self.service.get("/network").await
     }
+
+    pub async fn get_webhooks(&self) -> (Value, StatusCode) {
+        self.service.get("/webhooks").await
+    }
 }

 pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
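The new helpers give tests CRUD access to `/webhooks`. A suite-style sketch of how they compose (the payload shape and the `uuid` response field are assumptions for illustration, not the documented contract; not compilable on its own):

    #[actix_rt::test]
    async fn webhook_helpers_sketch() {
        let server = Server::new().await;
        // Assumed payload: the diff does not show the /webhooks schema.
        let (created, _code) =
            server.create_webhook(json!({ "url": "https://example.com/hook" })).await;
        // Assumed response field `uuid`, matching the uuid-based helper URLs.
        let uuid = created["uuid"].as_str().unwrap().to_owned();
        let (_list, _code) = server.get_webhooks().await;
        let (_one, _code) = server.get_webhook(&uuid).await;
        let (_patched, _code) =
            server.patch_webhook(&uuid, json!({ "url": "https://example.com/hook2" })).await;
        let (_deleted, _code) = server.delete_webhook(&uuid).await;
    }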
@@ -1318,7 +1318,7 @@ async fn add_no_documents() {
 async fn add_larger_dataset() {
     let server = Server::new_shared();
     let index = server.unique_index();
-    let update_id = index.load_test_set().await;
+    let update_id = index.load_test_set(server).await;
     let (response, code) = index.get_task(update_id).await;
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
@@ -1333,7 +1333,7 @@ async fn add_larger_dataset() {

     // x-ndjson add large test
     let index = server.unique_index();
-    let update_id = index.load_test_set_ndjson().await;
+    let update_id = index.load_test_set_ndjson(server).await;
     let (response, code) = index.get_task(update_id).await;
     assert_eq!(code, 200);
     assert_eq!(response["status"], "succeeded");
@@ -7,7 +7,8 @@ use crate::json;
 async fn delete_one_document_unexisting_index() {
     let server = Server::new_shared();
     let index = shared_does_not_exists_index().await;
-    let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
+    let (task, code) =
+        index.delete_document_by_filter_fail(json!({"filter": "a = b"}), server).await;
     assert_eq!(code, 202);

     server.wait_task(task.uid()).await.failed();
@@ -557,9 +557,9 @@ async fn delete_document_by_filter() {
     "###);

     let index = shared_does_not_exists_index().await;
-    // index does not exists
+    // index does not exist
     let (response, _code) =
-        index.delete_document_by_filter_fail(json!({ "filter": "doggo = bernese"})).await;
+        index.delete_document_by_filter_fail(json!({ "filter": "doggo = bernese"}), server).await;
     snapshot!(response, @r###"
     {
       "uid": "[uid]",
@@ -589,7 +589,7 @@ async fn delete_document_by_filter() {
     // no filterable are set
     let index = shared_empty_index().await;
     let (response, _code) =
-        index.delete_document_by_filter_fail(json!({ "filter": "doggo = bernese"})).await;
+        index.delete_document_by_filter_fail(json!({ "filter": "doggo = bernese"}), server).await;
     snapshot!(response, @r###"
     {
       "uid": "[uid]",
@@ -619,7 +619,7 @@ async fn delete_document_by_filter() {
     // not filterable while there is a filterable attribute
     let index = shared_index_with_documents().await;
     let (response, code) =
-        index.delete_document_by_filter_fail(json!({ "filter": "catto = jorts"})).await;
+        index.delete_document_by_filter_fail(json!({ "filter": "catto = jorts"}), server).await;
     snapshot!(code, @"202 Accepted");
     let response = server.wait_task(response.uid()).await.failed();
     snapshot!(response, @r###"
@@ -87,7 +87,7 @@ async fn get_document() {
 async fn get_document_sorted() {
     let server = Server::new_shared();
     let index = server.unique_index();
-    index.load_test_set().await;
+    index.load_test_set(server).await;

     let (task, _status_code) =
         index.update_settings_sortable_attributes(json!(["age", "email", "gender", "name"])).await;
@@ -639,7 +639,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
 async fn get_documents_displayed_attributes_is_ignored() {
     let server = Server::new_shared();
     let index = server.unique_index();
-    index.load_test_set().await;
+    index.load_test_set(server).await;
     index.update_settings(json!({"displayedAttributes": ["gender"]})).await;

     let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
@@ -2366,7 +2366,7 @@ async fn generate_and_import_dump_containing_vectors() {
     ))
     .await;
     snapshot!(code, @"202 Accepted");
-    let response = index.wait_task(response.uid()).await;
+    let response = server.wait_task(response.uid()).await;
     snapshot!(response);
     let (response, code) = index
         .add_documents(
@@ -2381,12 +2381,12 @@ async fn generate_and_import_dump_containing_vectors() {
     )
     .await;
     snapshot!(code, @"202 Accepted");
-    let response = index.wait_task(response.uid()).await;
+    let response = server.wait_task(response.uid()).await;
     snapshot!(response);

     let (response, code) = server.create_dump().await;
     snapshot!(code, @"202 Accepted");
-    let response = index.wait_task(response.uid()).await;
+    let response = server.wait_task(response.uid()).await;
     snapshot!(response["status"], @r###""succeeded""###);

     // ========= We made a dump, now we should clear the DB and try to import our dump
@@ -161,9 +161,9 @@ async fn test_create_multiple_indexes() {
     let (task2, _) = index2.create(None).await;
     let (task3, _) = index3.create(None).await;

-    index1.wait_task(task1.uid()).await.succeeded();
-    index2.wait_task(task2.uid()).await.succeeded();
-    index3.wait_task(task3.uid()).await.succeeded();
+    server.wait_task(task1.uid()).await.succeeded();
+    server.wait_task(task2.uid()).await.succeeded();
+    server.wait_task(task3.uid()).await.succeeded();

     assert_eq!(index1.get().await.1, 200);
     assert_eq!(index2.get().await.1, 200);
@@ -26,7 +26,7 @@ async fn create_and_delete_index() {
 async fn error_delete_unexisting_index() {
     let server = Server::new_shared();
     let index = shared_does_not_exists_index().await;
-    let (task, code) = index.delete_index_fail().await;
+    let (task, code) = index.delete_index_fail(server).await;

     assert_eq!(code, 202);
     server.wait_task(task.uid()).await.failed();
@@ -60,8 +60,8 @@ async fn list_multiple_indexes() {
     let index_with_key = server.unique_index();
     let (response_with_key, _status_code) = index_with_key.create(Some("key")).await;

-    index_without_key.wait_task(response_without_key.uid()).await.succeeded();
-    index_with_key.wait_task(response_with_key.uid()).await.succeeded();
+    server.wait_task(response_without_key.uid()).await.succeeded();
+    server.wait_task(response_with_key.uid()).await.succeeded();

     let (response, code) = server.list_indexes(None, Some(1000)).await;
     assert_eq!(code, 200);
@@ -81,8 +81,9 @@ async fn get_and_paginate_indexes() {
     let server = Server::new().await;
     const NB_INDEXES: usize = 50;
     for i in 0..NB_INDEXES {
-        server.index(format!("test_{i:02}")).create(None).await;
-        server.index(format!("test_{i:02}")).wait_task(i as u64).await;
+        let (task, code) = server.index(format!("test_{i:02}")).create(None).await;
+        assert_eq!(code, 202);
+        server.wait_task(task.uid()).await;
     }

     // basic
@@ -72,7 +72,7 @@ async fn error_update_existing_primary_key() {
     let server = Server::new_shared();
     let index = shared_index_with_documents().await;

-    let (update_task, code) = index.update_index_fail(Some("primary")).await;
+    let (update_task, code) = index.update_index_fail(Some("primary"), server).await;

     assert_eq!(code, 202);
     let response = server.wait_task(update_task.uid()).await.failed();
@@ -91,7 +91,7 @@ async fn error_update_existing_primary_key() {
 async fn error_update_unexisting_index() {
     let server = Server::new_shared();
     let index = shared_does_not_exists_index().await;
-    let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
+    let (task, code) = index.update_index_fail(Some("my-primary-key"), server).await;

     assert_eq!(code, 202);
@@ -304,7 +304,7 @@ async fn search_bad_filter() {
     let server = Server::new_shared();
     let index = server.unique_index();
     // Also, to trigger the error message we need to effectively create the index or else it'll throw an
-    // index does not exists error.
+    // index does not exist error.
     let (response, _code) = index.create(None).await;
     server.wait_task(response.uid()).await.succeeded();

@@ -1263,34 +1263,34 @@ async fn search_with_contains_without_enabling_the_feature() {
     let server = Server::new_shared();
     let index = server.unique_index();
     // Also, to trigger the error message we need to effectively create the index or else it'll throw an
-    // index does not exists error.
+    // index does not exist error.
     let (task, _code) = index.create(None).await;
     server.wait_task(task.uid()).await.succeeded();

     index
         .search(json!({ "filter": "doggo CONTAINS kefir" }), |response, code| {
             snapshot!(code, @"400 Bad Request");
-            snapshot!(json_string!(response), @r###"
+            snapshot!(json_string!(response), @r#"
             {
-              "message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
+              "message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
               "code": "feature_not_enabled",
               "type": "invalid_request",
               "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
             }
-            "###);
+            "#);
         })
         .await;
     index
         .search(json!({ "filter": "doggo != echo AND doggo CONTAINS kefir" }), |response, code| {
             snapshot!(code, @"400 Bad Request");
-            snapshot!(json_string!(response), @r###"
+            snapshot!(json_string!(response), @r#"
             {
-              "message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n25:33 doggo != echo AND doggo CONTAINS kefir",
+              "message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n25:33 doggo != echo AND doggo CONTAINS kefir",
               "code": "feature_not_enabled",
               "type": "invalid_request",
               "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
             }
-            "###);
+            "#);
         })
         .await;

@@ -1299,24 +1299,24 @@ async fn search_with_contains_without_enabling_the_feature() {
         index.search_post(json!({ "filter": ["doggo != echo", "doggo CONTAINS kefir"] })).await;

     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(json_string!(response), @r#"
     {
-      "message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
+      "message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
       "code": "feature_not_enabled",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
     }
-    "###);
+    "#);
     let (response, code) =
         index.search_post(json!({ "filter": ["doggo != echo", ["doggo CONTAINS kefir"]] })).await;

     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(json_string!(response), @r#"
     {
-      "message": "Using `CONTAINS` or `STARTS WITH` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
+      "message": "Using `CONTAINS` in a filter requires enabling the `contains filter` experimental feature. See https://github.com/orgs/meilisearch/discussions/763\n7:15 doggo CONTAINS kefir",
       "code": "feature_not_enabled",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
     }
-    "###);
+    "#);
 }

@@ -4,8 +4,8 @@ use tempfile::TempDir;

 use super::test_settings_documents_indexing_swapping_and_search;
 use crate::common::{
-    default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
-    DOCUMENTS, NESTED_DOCUMENTS,
+    default_settings, shared_index_for_fragments, shared_index_with_documents,
+    shared_index_with_nested_documents, Server, DOCUMENTS, NESTED_DOCUMENTS,
 };
 use crate::json;
@@ -731,3 +731,432 @@ async fn test_filterable_attributes_priority() {
     )
     .await;
 }
+
+#[actix_rt::test]
+async fn vector_filter_all_embedders() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "echo"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 4
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_missing_fragment() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.fragments EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "message": "The vector filter is missing a fragment name.\n24:31 _vectors.rest.fragments EXISTS",
+      "code": "invalid_search_filter",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_nonexistent_embedder() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.other EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "message": "Index `[uuid]`: The embedder `other` does not exist. Available embedders are: `rest`.\n10:15 _vectors.other EXISTS",
+      "code": "invalid_search_filter",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_all_embedders_user_provided() {
+    let index = shared_index_for_fragments().await;
+
+    // This one is counterintuitive, but it is the same as the previous one.
+    // It's because userProvided is interpreted as an embedder name
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.userProvided EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "message": "Index `[uuid]`: The embedder `userProvided` does not exist. Available embedders are: `rest`.\n10:22 _vectors.userProvided EXISTS",
+      "code": "invalid_search_filter",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_specific_embedder() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "echo"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 4
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_user_provided() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.userProvided EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "echo"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 1
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_specific_fragment() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.fragments.withBreed EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 2
+    }
+    "#);
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.fragments.basic EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_non_existant_fragment() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.fragments.withBred EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "message": "Index `[uuid]`: The fragment `withBred` does not exist on embedder `rest`. Available fragments on this embedder are: `basic`, `withBreed`. Did you mean `withBreed`?\n25:33 _vectors.rest.fragments.withBred EXISTS",
+      "code": "invalid_search_filter",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_document_template_but_fragments_used() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.documentTemplate EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 0
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_document_template() {
+    let (_mock, setting) = crate::vector::create_mock().await;
+    let server = crate::vector::get_server_vector().await;
+    let index = server.index("doggo");
+
+    let (_response, code) = server.set_features(json!({"multimodal": true})).await;
+    snapshot!(code, @"200 OK");
+
+    let (response, code) = index
+        .update_settings(json!({
+            "embedders": {
+                "rest": setting,
+            },
+        }))
+        .await;
+    snapshot!(code, @"202 Accepted");
+    server.wait_task(response.uid()).await.succeeded();
+
+    let documents = json!([
+        {"id": 0, "name": "kefir"},
+        {"id": 1, "name": "echo", "_vectors": { "rest": [1, 1, 1] }},
+        {"id": 2, "name": "intel"},
+        {"id": 3, "name": "iko" }
+    ]);
+    let (value, code) = index.add_documents(documents, None).await;
+    snapshot!(code, @"202 Accepted");
+    server.wait_task(value.uid()).await.succeeded();
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.documentTemplate EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "iko"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_feature_gate() {
+    let index = shared_index_with_documents().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "message": "using a vector filter requires enabling the `multimodal` experimental feature. See https://github.com/orgs/meilisearch/discussions/846\n1:9 _vectors EXISTS",
+      "code": "feature_not_enabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_negation() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.userProvided NOT EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_or_combination() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": "_vectors.rest.fragments.withBreed EXISTS OR _vectors.rest.userProvided EXISTS",
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "echo"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3
+    }
+    "#);
+}
+
+#[actix_rt::test]
+async fn vector_filter_regenerate() {
+    let index = shared_index_for_fragments().await;
+
+    let (value, _code) = index
+        .search_post(json!({
+            "filter": format!("_vectors.rest.regenerate EXISTS"),
+            "attributesToRetrieve": ["name"]
+        }))
+        .await;
+    snapshot!(value, @r#"
+    {
+      "hits": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "intel"
+        },
+        {
+          "name": "dustin"
+        }
+      ],
+      "query": "",
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3
+    }
+    "#);
+}
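The tests above walk a dotted filter path: `_vectors`, then an embedder, then `fragments.<name>`, `userProvided`, `documentTemplate`, or `regenerate`. A small self-contained helper that assembles those filter strings (grammar inferred from the tests, not from a spec):

    // Build `_vectors[.embedder[.source]] EXISTS` filter strings.
    fn vectors_filter(embedder: Option<&str>, source: Option<&str>) -> String {
        let mut path = String::from("_vectors");
        if let Some(embedder) = embedder {
            path.push('.');
            path.push_str(embedder);
        }
        if let Some(source) = source {
            path.push('.');
            path.push_str(source);
        }
        format!("{path} EXISTS")
    }

    fn main() {
        assert_eq!(vectors_filter(None, None), "_vectors EXISTS");
        assert_eq!(vectors_filter(Some("rest"), None), "_vectors.rest EXISTS");
        assert_eq!(
            vectors_filter(Some("rest"), Some("fragments.withBreed")),
            "_vectors.rest.fragments.withBreed EXISTS"
        );
    }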
@@ -148,7 +148,70 @@ async fn simple_search() {
     )
     .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}}},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}}}]"###);
+    snapshot!(response, @r#"
+    {
+      "hits": [
+        {
+          "title": "Captain Planet",
+          "desc": "He's not part of the Marvel Cinematic Universe",
+          "id": "2",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  1.0,
+                  2.0
+                ]
+              ],
+              "regenerate": false
+            }
+          }
+        },
+        {
+          "title": "Captain Marvel",
+          "desc": "a Shazam ersatz",
+          "id": "3",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  2.0,
+                  3.0
+                ]
+              ],
+              "regenerate": false
+            }
+          }
+        },
+        {
+          "title": "Shazam!",
+          "desc": "a Captain Marvel ersatz",
+          "id": "1",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  1.0,
+                  3.0
+                ]
+              ],
+              "regenerate": false
+            }
+          }
+        }
+      ],
+      "query": "Captain",
+      "queryVector": [
+        1.0,
+        1.0
+      ],
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3,
+      "semanticHitCount": 0
+    }
+    "#);
     snapshot!(response["semanticHitCount"], @"0");

     let (response, code) = index
@@ -157,7 +220,73 @@ async fn simple_search() {
     )
     .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.9848484848484848},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response, @r#"
+    {
+      "hits": [
+        {
+          "title": "Captain Marvel",
+          "desc": "a Shazam ersatz",
+          "id": "3",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  2.0,
+                  3.0
+                ]
+              ],
+              "regenerate": false
+            }
+          },
+          "_rankingScore": 0.990290343761444
+        },
+        {
+          "title": "Captain Planet",
+          "desc": "He's not part of the Marvel Cinematic Universe",
+          "id": "2",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  1.0,
+                  2.0
+                ]
+              ],
+              "regenerate": false
+            }
+          },
+          "_rankingScore": 0.9848484848484848
+        },
+        {
+          "title": "Shazam!",
+          "desc": "a Captain Marvel ersatz",
+          "id": "1",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  1.0,
+                  3.0
+                ]
+              ],
+              "regenerate": false
+            }
+          },
+          "_rankingScore": 0.9472135901451112
+        }
+      ],
+      "query": "Captain",
+      "queryVector": [
+        1.0,
+        1.0
+      ],
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3,
+      "semanticHitCount": 2
+    }
+    "#);
     snapshot!(response["semanticHitCount"], @"2");

     let (response, code) = index
@@ -166,7 +295,73 @@ async fn simple_search() {
     )
     .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response, @r#"
+    {
+      "hits": [
+        {
+          "title": "Captain Marvel",
+          "desc": "a Shazam ersatz",
+          "id": "3",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  2.0,
+                  3.0
+                ]
+              ],
+              "regenerate": false
+            }
+          },
+          "_rankingScore": 0.990290343761444
+        },
+        {
+          "title": "Captain Planet",
+          "desc": "He's not part of the Marvel Cinematic Universe",
+          "id": "2",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  1.0,
+                  2.0
+                ]
+              ],
+              "regenerate": false
+            }
+          },
+          "_rankingScore": 0.974341630935669
+        },
+        {
+          "title": "Shazam!",
+          "desc": "a Captain Marvel ersatz",
+          "id": "1",
+          "_vectors": {
+            "default": {
+              "embeddings": [
+                [
+                  1.0,
+                  3.0
+                ]
+              ],
+              "regenerate": false
+            }
+          },
+          "_rankingScore": 0.9472135901451112
+        }
+      ],
+      "query": "Captain",
+      "queryVector": [
+        1.0,
+        1.0
+      ],
+      "processingTimeMs": "[duration]",
+      "limit": 20,
+      "offset": 0,
+      "estimatedTotalHits": 3,
+      "semanticHitCount": 3
+    }
+    "#);
     snapshot!(response["semanticHitCount"], @"3");
 }

@@ -3703,7 +3703,7 @@ async fn federation_vector_two_indexes() {
        ]}))
        .await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
    snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
    {
      "hits": [
        {
@@ -3911,9 +3911,20 @@ async fn federation_vector_two_indexes() {
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 8,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.5
        ],
        "1": [
          0.8,
          0.6
        ]
      },
      "semanticHitCount": 6
    }
    "###);
    "#);

    // hybrid search, distinct embedder
    let (response, code) = server
@@ -3923,7 +3934,7 @@ async fn federation_vector_two_indexes() {
        ]}))
        .await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
    snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
    {
      "hits": [
        {
@@ -4139,9 +4150,20 @@ async fn federation_vector_two_indexes() {
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 8,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.5
        ],
        "1": [
          -1.0,
          0.6
        ]
      },
      "semanticHitCount": 8
    }
    "###);
    "#);
}

#[actix_rt::test]

@@ -2,8 +2,9 @@ use std::sync::Arc;

use actix_http::StatusCode;
use meili_snap::{json_string, snapshot};
use wiremock::matchers::AnyMatcher;
use wiremock::{Mock, MockServer, ResponseTemplate};
use wiremock::matchers::method;
use wiremock::matchers::{path, AnyMatcher};
use wiremock::{Mock, MockServer, Request, ResponseTemplate};

use crate::common::{Server, Value, SCORE_DOCUMENTS};
use crate::json;
@@ -158,11 +159,11 @@ async fn remote_sharding() {
    let index1 = ms1.index("test");
    let index2 = ms2.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index2.add_documents(json!(documents[3..5]), None).await;
    index2.wait_task(task.uid()).await.succeeded();
    ms2.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -415,6 +416,503 @@ async fn remote_sharding() {
    "###);
}

#[actix_rt::test]
async fn remote_sharding_retrieve_vectors() {
    let ms0 = Server::new().await;
    let ms1 = Server::new().await;
    let ms2 = Server::new().await;
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let index2 = ms2.index("test");

    // enable feature

    let (response, code) = ms0.set_features(json!({"network": true})).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response["network"]), @"true");
    let (response, code) = ms1.set_features(json!({"network": true})).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response["network"]), @"true");
    let (response, code) = ms2.set_features(json!({"network": true})).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response["network"]), @"true");

    // set self

    let (response, code) = ms0.set_network(json!({"self": "ms0"})).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms0",
      "remotes": {}
    }
    "###);
    let (response, code) = ms1.set_network(json!({"self": "ms1"})).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms1",
      "remotes": {}
    }
    "###);
    let (response, code) = ms2.set_network(json!({"self": "ms2"})).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response), @r###"
    {
      "self": "ms2",
      "remotes": {}
    }
    "###);

    // setup embedders

    let mock_server = MockServer::start().await;
    Mock::given(method("POST"))
        .and(path("/"))
        .respond_with(move |req: &Request| {
            println!("Received request: {:?}", req);
            let text = req.body_json::<String>().unwrap().to_lowercase();
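            // The mock embedder is additive: each keyword found in the text
            // contributes its fixed component, so "dark knight" embeds to
            // [0.0, 0.1, 0.0] + [0.1, 0.1, 0.0] = [0.1, 0.2, 0.0], the vector
            // reported for that query in the snapshots below.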
            let patterns = [
                ("batman", [1.0, 0.0, 0.0]),
                ("dark", [0.0, 0.1, 0.0]),
                ("knight", [0.1, 0.1, 0.0]),
                ("returns", [0.0, 0.0, 0.2]),
                ("part", [0.05, 0.1, 0.0]),
                ("1", [0.3, 0.05, 0.0]),
                ("2", [0.2, 0.05, 0.0]),
            ];
            let mut embedding = vec![0.; 3];
            for (pattern, vector) in patterns {
                if text.contains(pattern) {
                    for (i, v) in vector.iter().enumerate() {
                        embedding[i] += v;
                    }
                }
            }
            ResponseTemplate::new(200).set_body_json(json!({ "data": embedding }))
        })
        .mount(&mock_server)
        .await;
    let url = mock_server.uri();

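    // Point every server's `rest` embedder at the shared mock; the
    // `documentTemplate` feeds each document's `name` to it, so the same name
    // embeds identically on every remote.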
    for (server, index) in [(&ms0, &index0), (&ms1, &index1), (&ms2, &index2)] {
        let (response, code) = index
            .update_settings(json!({
                "embedders": {
                    "rest": {
                        "source": "rest",
                        "url": url,
                        "dimensions": 3,
                        "request": "{{text}}",
                        "response": { "data": "{{embedding}}" },
                        "documentTemplate": "{{doc.name}}",
                    },
                },
            }))
            .await;
        snapshot!(code, @"202 Accepted");
        server.wait_task(response.uid()).await.succeeded();
    }

    // wrap servers
    let ms0 = Arc::new(ms0);
    let ms1 = Arc::new(ms1);
    let ms2 = Arc::new(ms2);

    let rms0 = LocalMeili::new(ms0.clone()).await;
    let rms1 = LocalMeili::new(ms1.clone()).await;
    let rms2 = LocalMeili::new(ms2.clone()).await;

    // set network
    let network = json!({"remotes": {
        "ms0": {
            "url": rms0.url()
        },
        "ms1": {
            "url": rms1.url()
        },
        "ms2": {
            "url": rms2.url()
        }
    }});

    let (_response, status_code) = ms0.set_network(network.clone()).await;
    snapshot!(status_code, @"200 OK");
    let (_response, status_code) = ms1.set_network(network.clone()).await;
    snapshot!(status_code, @"200 OK");
    let (_response, status_code) = ms2.set_network(network.clone()).await;
    snapshot!(status_code, @"200 OK");

    // multi vector search: one query per remote

    let request = json!({
        "federation": {},
        "queries": [
            {
                "q": "batman",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms0"
                }
            },
            {
                "q": "dark knight",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
            {
                "q": "returns",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms2"
                }
            },
        ]
    });

    let (response, code) = ms0.multi_search(request.clone()).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
    {
      "hits": [],
      "processingTimeMs": "[time]",
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 0,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.0
        ],
        "1": [
          0.1,
          0.2,
          0.0
        ],
        "2": [
          0.0,
          0.0,
          0.2
        ]
      },
      "semanticHitCount": 0,
      "remoteErrors": {}
    }
    "#);

    // multi vector search: two local queries, one remote

    let request = json!({
        "federation": {},
        "queries": [
            {
                "q": "batman",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms0"
                }
            },
            {
                "q": "dark knight",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms0"
                }
            },
            {
                "q": "returns",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms2"
                }
            },
        ]
    });

    let (response, code) = ms0.multi_search(request.clone()).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
    {
      "hits": [],
      "processingTimeMs": "[time]",
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 0,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.0
        ],
        "1": [
          0.1,
          0.2,
          0.0
        ],
        "2": [
          0.0,
          0.0,
          0.2
        ]
      },
      "semanticHitCount": 0,
      "remoteErrors": {}
    }
    "#);

    // multi vector search: two queries on the same remote

    let request = json!({
        "federation": {},
        "queries": [
            {
                "q": "batman",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms0"
                }
            },
            {
                "q": "dark knight",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
            {
                "q": "returns",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
        ]
    });

    let (response, code) = ms0.multi_search(request.clone()).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
    {
      "hits": [],
      "processingTimeMs": "[time]",
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 0,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.0
        ],
        "1": [
          0.1,
          0.2,
          0.0
        ],
        "2": [
          0.0,
          0.0,
          0.2
        ]
      },
      "semanticHitCount": 0,
      "remoteErrors": {}
    }
    "#);

    // multi search: two vector, one keyword
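    // The second query runs with `semanticRatio: 0.0` (pure keyword search),
    // so it is never embedded and `queryVectors` below carries no "1" entry.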

    let request = json!({
        "federation": {},
        "queries": [
            {
                "q": "batman",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms0"
                }
            },
            {
                "q": "dark knight",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 0.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
            {
                "q": "returns",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
        ]
    });

    let (response, code) = ms0.multi_search(request.clone()).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
    {
      "hits": [],
      "processingTimeMs": "[time]",
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 0,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.0
        ],
        "2": [
          0.0,
          0.0,
          0.2
        ]
      },
      "semanticHitCount": 0,
      "remoteErrors": {}
    }
    "#);

    // multi vector search: no local queries, all remote

    let request = json!({
        "federation": {},
        "queries": [
            {
                "q": "batman",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
            {
                "q": "dark knight",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
            {
                "q": "returns",
                "indexUid": "test",
                "hybrid": {
                    "semanticRatio": 1.0,
                    "embedder": "rest"
                },
                "retrieveVectors": true,
                "federationOptions": {
                    "remote": "ms1"
                }
            },
        ]
    });

    let (response, code) = ms0.multi_search(request.clone()).await;
    snapshot!(code, @"200 OK");
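    // Every query is answered by a remote here, so the merged response carries
    // the gathered `queryVectors` but no local `semanticHitCount`.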
    snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
    {
      "hits": [],
      "processingTimeMs": "[time]",
      "limit": 20,
      "offset": 0,
      "estimatedTotalHits": 0,
      "queryVectors": {
        "0": [
          1.0,
          0.0,
          0.0
        ],
        "1": [
          0.1,
          0.2,
          0.0
        ],
        "2": [
          0.0,
          0.0,
          0.2
        ]
      },
      "remoteErrors": {}
    }
    "#);
}

#[actix_rt::test]
async fn error_unregistered_remote() {
    let ms0 = Server::new().await;
@@ -454,9 +952,9 @@ async fn error_unregistered_remote() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -572,9 +1070,9 @@ async fn error_no_weighted_score() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -705,9 +1203,9 @@ async fn error_bad_response() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -842,9 +1340,9 @@ async fn error_bad_request() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -972,10 +1470,10 @@ async fn error_bad_request_facets_by_index() {
    let index0 = ms0.index("test0");
    let index1 = ms1.index("test1");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();

    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -1113,13 +1611,13 @@ async fn error_bad_request_facets_by_index_facet() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();

    let (task, _status_code) = index0.update_settings_filterable_attributes(json!(["id"])).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();

    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -1224,6 +1722,7 @@ async fn error_bad_request_facets_by_index_facet() {
}

#[actix_rt::test]
#[ignore]
async fn error_remote_does_not_answer() {
    let ms0 = Server::new().await;
    let ms1 = Server::new().await;
@@ -1262,9 +1761,9 @@ async fn error_remote_does_not_answer() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -1463,9 +1962,9 @@ async fn error_remote_404() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -1658,9 +2157,9 @@ async fn error_remote_sharding_auth() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    ms1.clear_api_key();
@@ -1818,9 +2317,9 @@ async fn remote_sharding_auth() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    ms1.clear_api_key();
@@ -1973,9 +2472,9 @@ async fn error_remote_500() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -2152,9 +2651,9 @@ async fn error_remote_500_once() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);
@@ -2335,9 +2834,9 @@ async fn error_remote_timeout() {
    let index0 = ms0.index("test");
    let index1 = ms1.index("test");
    let (task, _status_code) = index0.add_documents(json!(documents[0..2]), None).await;
    index0.wait_task(task.uid()).await.succeeded();
    ms0.wait_task(task.uid()).await.succeeded();
    let (task, _status_code) = index1.add_documents(json!(documents[2..3]), None).await;
    index1.wait_task(task.uid()).await.succeeded();
    ms1.wait_task(task.uid()).await.succeeded();

    // wrap servers
    let ms0 = Arc::new(ms0);

66 crates/meilisearch/tests/settings/chat.rs Normal file
@@ -0,0 +1,66 @@
use crate::common::Server;
use crate::json;
use meili_snap::{json_string, snapshot};

#[actix_rt::test]
async fn set_reset_chat_issue_5772() {
    let server = Server::new().await;
    let index = server.unique_index();

    let (_, code) = server
        .set_features(json!({
            "chatCompletions": true,
        }))
        .await;
    snapshot!(code, @r#"200 OK"#);

    let (task1, _code) = index.update_settings_chat(json!({
        "description": "test!",
        "documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
        "documentTemplateMaxBytes": 400,
        "searchParameters": {
            "limit": 15,
            "sort": [],
            "attributesToSearchOn": []
        }
    })).await;
    server.wait_task(task1.uid()).await.succeeded();

    let (response, _) = index.settings().await;
    snapshot!(json_string!(response["chat"]), @r#"
    {
      "description": "test!",
      "documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
      "documentTemplateMaxBytes": 400,
      "searchParameters": {
        "limit": 15,
        "sort": [],
        "attributesToSearchOn": []
      }
    }
    "#);

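    // Update only `limit`: the other searchParameters must keep their previous
    // values instead of being reset (the regression covered by issue 5772).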
    let (task2, _status_code) = index.update_settings_chat(json!({
        "description": "test!",
        "documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
        "documentTemplateMaxBytes": 400,
        "searchParameters": {
            "limit": 16
        }
    })).await;
    server.wait_task(task2.uid()).await.succeeded();

    let (response, _) = index.settings().await;
    snapshot!(json_string!(response["chat"]), @r#"
    {
      "description": "test!",
      "documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
      "documentTemplateMaxBytes": 400,
      "searchParameters": {
        "limit": 16,
        "sort": [],
        "attributesToSearchOn": []
      }
    }
    "#);
}

@@ -186,7 +186,7 @@ test_setting_routes!(
    },
    {
        setting: chat,
        update_verb: put,
        update_verb: patch,
        default_value: {
            "description": "",
            "documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",

@@ -1,3 +1,4 @@
mod chat;
mod distinct;
mod errors;
mod get_settings;

@@ -298,7 +298,7 @@ async fn similar_bad_filter() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let (response, code) =
        index.similar_post(json!({ "id": 287947, "filter": true, "embedder": "manual" })).await;
@@ -335,7 +335,7 @@ async fn filter_invalid_syntax_object() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    index
        .similar(json!({"id": 287947, "filter": "title & Glass", "embedder": "manual"}), |response, code| {
@@ -373,7 +373,7 @@ async fn filter_invalid_syntax_array() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    index
        .similar(json!({"id": 287947, "filter": ["title & Glass"], "embedder": "manual"}), |response, code| {
@@ -411,7 +411,7 @@ async fn filter_invalid_syntax_string() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
@@ -451,7 +451,7 @@ async fn filter_invalid_attribute_array() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    index
        .similar(
@@ -492,7 +492,7 @@ async fn filter_invalid_attribute_string() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    index
        .similar(
@@ -533,7 +533,7 @@ async fn filter_reserved_geo_attribute_array() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:13 _geo = Glass",
@@ -573,7 +573,7 @@ async fn filter_reserved_geo_attribute_string() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:13 _geo = Glass",
@@ -613,7 +613,7 @@ async fn filter_reserved_attribute_array() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:21 _geoDistance = Glass",
@@ -653,7 +653,7 @@ async fn filter_reserved_attribute_string() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:21 _geoDistance = Glass",
@@ -693,7 +693,7 @@ async fn filter_reserved_geo_point_array() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:18 _geoPoint = Glass",
@@ -733,7 +733,7 @@ async fn filter_reserved_geo_point_string() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await.succeeded();
    server.wait_task(value.uid()).await.succeeded();

    let expected_response = json!({
        "message": "`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:18 _geoPoint = Glass",
@@ -825,7 +825,7 @@ async fn similar_bad_embedder() {
    let documents = DOCUMENTS.clone();
    let (value, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(value.uid()).await;
    server.wait_task(value.uid()).await;

    let expected_response = json!({
        "message": "Cannot find embedder with name `auto`.",

@@ -51,12 +51,12 @@ async fn perform_snapshot() {
        }))
        .await;

    index.load_test_set().await;
    index.load_test_set(&server).await;

    let (task, code) = server.index("test1").create(Some("prim")).await;
    meili_snap::snapshot!(code, @"202 Accepted");

    index.wait_task(task.uid()).await.succeeded();
    server.wait_task(task.uid()).await.succeeded();

    // wait for the _next task_ to process, aka the snapshot that should be enqueued at some point

@@ -122,19 +122,15 @@ async fn perform_on_demand_snapshot() {
    let server = Server::new_with_options(options).await.unwrap();

    let index = server.index("catto");
    index
        .update_settings(json!({
            "searchableAttributes": [],
        }))
        .await;
    index.update_settings(json!({ "searchableAttributes": [] })).await;

    index.load_test_set().await;
    index.load_test_set(&server).await;

    let (task, _status_code) = server.index("doggo").create(Some("bone")).await;
    index.wait_task(task.uid()).await.succeeded();
    server.wait_task(task.uid()).await.succeeded();

    let (task, _status_code) = server.index("doggo").create(Some("bone")).await;
    index.wait_task(task.uid()).await.failed();
    server.wait_task(task.uid()).await.failed();

    let (task, code) = server.create_snapshot().await;
    snapshot!(code, @"202 Accepted");
@@ -147,7 +143,7 @@ async fn perform_on_demand_snapshot() {
        "enqueuedAt": "[date]"
    }
    "###);
    let task = index.wait_task(task.uid()).await;
    let task = server.wait_task(task.uid()).await;
    snapshot!(json_string!(task, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "uid": 4,
@@ -203,3 +199,70 @@ async fn perform_on_demand_snapshot() {
        server.index("doggo").settings(),
    );
}

#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn snapshotception_issue_4653() {
    let temp = tempfile::tempdir().unwrap();
    let snapshot_dir = tempfile::tempdir().unwrap();
    let options =
        Opt { snapshot_dir: snapshot_dir.path().to_owned(), ..default_settings(temp.path()) };

    let server = Server::new_with_options(options).await.unwrap();

    let (task, code) = server.create_snapshot().await;
    snapshot!(code, @"202 Accepted");
    snapshot!(json_string!(task, { ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": null,
      "status": "enqueued",
      "type": "snapshotCreation",
      "enqueuedAt": "[date]"
    }
    "###);
    server.wait_task(task.uid()).await.succeeded();

    let temp = tempfile::tempdir().unwrap();
    let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");

    let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };
    let snapshot_server = Server::new_with_options(options).await.unwrap();

    // The snapshot should have been taken without the snapshot creation task
    let (tasks, code) = snapshot_server.tasks().await;
    snapshot!(code, @"200 OK");
    snapshot!(tasks, @r#"
    {
      "results": [],
      "total": 0,
      "limit": 20,
      "from": null,
      "next": null
    }
    "#);

    // Ensure the task is not present in the snapshot
    let (task, code) = snapshot_server.get_task(0).await;
    snapshot!(code, @"404 Not Found");
    snapshot!(task, @r#"
    {
      "message": "Task `0` not found.",
      "code": "task_not_found",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#task_not_found"
    }
    "#);

    // Ensure the batch is also not present
    let (batch, code) = snapshot_server.get_batch(0).await;
    snapshot!(code, @"404 Not Found");
    snapshot!(batch, @r#"
    {
      "message": "Batch `0` not found.",
      "code": "batch_not_found",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#batch_not_found"
    }
    "#);
}

@@ -32,7 +32,7 @@ async fn stats() {
    let (task, code) = index.create(Some("id")).await;

    assert_eq!(code, 202);
    index.wait_task(task.uid()).await.succeeded();
    server.wait_task(task.uid()).await.succeeded();

    let (response, code) = server.stats().await;

@@ -58,7 +58,7 @@ async fn stats() {
    assert_eq!(code, 202, "{response}");
    assert_eq!(response["taskUid"], 1);

    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let timestamp = OffsetDateTime::now_utc();
    let (response, code) = server.stats().await;
@@ -107,7 +107,7 @@ async fn add_remove_embeddings() {

    let (response, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {
@@ -135,7 +135,7 @@ async fn add_remove_embeddings() {

    let (response, code) = index.update_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {
@@ -163,7 +163,7 @@ async fn add_remove_embeddings() {

    let (response, code) = index.update_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {
@@ -192,7 +192,7 @@ async fn add_remove_embeddings() {

    let (response, code) = index.update_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {
@@ -245,7 +245,7 @@ async fn add_remove_embedded_documents() {

    let (response, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {
@@ -269,7 +269,7 @@ async fn add_remove_embedded_documents() {
    // delete one embedded document; 1 embedded document remains, with 3 embeddings in total
    let (response, code) = index.delete_document(0).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {
@@ -305,7 +305,7 @@ async fn update_embedder_settings() {

    let (response, code) = index.add_documents(documents, None).await;
    snapshot!(code, @"202 Accepted");
    index.wait_task(response.uid()).await.succeeded();
    server.wait_task(response.uid()).await.succeeded();

    let (stats, _code) = index.stats().await;
    snapshot!(json_string!(stats, {

@@ -2,16 +2,18 @@
//! post requests. The webhook handle starts a server and forwards all the
//! received requests into a channel for you to handle.

use std::path::PathBuf;
use std::sync::Arc;

use actix_http::body::MessageBody;
use actix_web::dev::{ServiceFactory, ServiceResponse};
use actix_web::web::{Bytes, Data};
use actix_web::{post, App, HttpRequest, HttpResponse, HttpServer};
use meili_snap::snapshot;
use meili_snap::{json_string, snapshot};
use meilisearch::Opt;
use tokio::sync::mpsc;
use url::Url;
use uuid::Uuid;

use crate::common::{self, default_settings, Server};
use crate::json;
@@ -68,21 +70,55 @@ async fn create_webhook_server() -> WebhookHandle {
}

#[actix_web::test]
async fn test_basic_webhook() {
    let WebhookHandle { server_handle, url, mut receiver } = create_webhook_server().await;

async fn cli_only() {
    let db_path = tempfile::tempdir().unwrap();
    let server = Server::new_with_options(Opt {
        task_webhook_url: Some(Url::parse(&url).unwrap()),
        task_webhook_url: Some(Url::parse("https://example-cli.com/").unwrap()),
        task_webhook_authorization_header: Some(String::from("Bearer a-secret-token")),
        ..default_settings(db_path.path())
    })
    .await
    .unwrap();

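    // The webhook defined on the command line is exposed through the API under
    // the nil UUID and is not editable.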
let index = server.index("tamo");
|
||||
let (webhooks, code) = server.get_webhooks().await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(webhooks, @r#"
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"uuid": "00000000-0000-0000-0000-000000000000",
|
||||
"isEditable": false,
|
||||
"url": "https://example-cli.com/",
|
||||
"headers": {
|
||||
"Authorization": "Bearer a-secret-token"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_web::test]
|
||||
async fn single_receives_data() {
|
||||
let WebhookHandle { server_handle, url, mut receiver } = create_webhook_server().await;
|
||||
|
||||
let server = Server::new().await;
|
||||
|
||||
let (value, code) = server.create_webhook(json!({ "url": url })).await;
|
||||
snapshot!(code, @"201 Created");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]", ".url" => "[ignored]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "[ignored]",
|
||||
"headers": {}
|
||||
}
|
||||
"#);
|
||||
|
||||
// May be flaky: we're relying on the fact that while the first document addition is processed, the other
|
||||
// operations will be received and will be batched together. If it doesn't happen it's not a problem
|
||||
// the rest of the test won't assume anything about the number of tasks per batch.
|
||||
let index = server.index("tamo");
|
||||
for i in 0..5 {
|
||||
let (_, _status) = index.add_documents(json!({ "id": i, "doggo": "bone" }), None).await;
|
||||
}
|
||||
@@ -127,3 +163,496 @@ async fn test_basic_webhook() {
|
||||
|
||||
    server_handle.abort();
}

#[actix_web::test]
async fn multiple_receive_data() {
    let WebhookHandle { server_handle: handle1, url: url1, receiver: mut receiver1 } =
        create_webhook_server().await;
    let WebhookHandle { server_handle: handle2, url: url2, receiver: mut receiver2 } =
        create_webhook_server().await;
    let WebhookHandle { server_handle: handle3, url: url3, receiver: mut receiver3 } =
        create_webhook_server().await;

    let db_path = tempfile::tempdir().unwrap();
    let server = Server::new_with_options(Opt {
        task_webhook_url: Some(Url::parse(&url3).unwrap()),
        ..default_settings(db_path.path())
    })
    .await
    .unwrap();

    for url in [url1, url2] {
        let (value, code) = server.create_webhook(json!({ "url": url })).await;
        snapshot!(code, @"201 Created");
        snapshot!(json_string!(value, { ".uuid" => "[uuid]", ".url" => "[ignored]" }), @r#"
        {
          "uuid": "[uuid]",
          "isEditable": true,
          "url": "[ignored]",
          "headers": {}
        }
        "#);
    }
    let index = server.index("tamo");
    let (_, status) = index.add_documents(json!({ "id": 1, "doggo": "bone" }), None).await;
    snapshot!(status, @"202 Accepted");

    let mut count1 = 0;
    let mut count2 = 0;
    let mut count3 = 0;
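    // Drain the three receivers until every webhook has seen at least one
    // notification for the task.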
    while count1 == 0 || count2 == 0 || count3 == 0 {
        tokio::select! {
            msg = receiver1.recv() => { if msg.is_some() { count1 += 1; } },
            msg = receiver2.recv() => { if msg.is_some() { count2 += 1; } },
            msg = receiver3.recv() => { if msg.is_some() { count3 += 1; } },
        }
    }

    assert_eq!(count1, 1);
    assert_eq!(count2, 1);
    assert_eq!(count3, 1);

    handle1.abort();
    handle2.abort();
    handle3.abort();
}

#[actix_web::test]
async fn cli_with_dumps() {
    let db_path = tempfile::tempdir().unwrap();
    let server = Server::new_with_options(Opt {
        task_webhook_url: Some(Url::parse("http://defined-in-test-cli.com").unwrap()),
        task_webhook_authorization_header: Some(String::from(
            "Bearer a-secret-token-defined-in-test-cli",
        )),
        import_dump: Some(PathBuf::from("../dump/tests/assets/v6-with-webhooks.dump")),
        ..default_settings(db_path.path())
    })
    .await
    .unwrap();

    let (webhooks, code) = server.get_webhooks().await;
    snapshot!(code, @"200 OK");
    snapshot!(webhooks, @r#"
    {
      "results": [
        {
          "uuid": "00000000-0000-0000-0000-000000000000",
          "isEditable": false,
          "url": "http://defined-in-test-cli.com/",
          "headers": {
            "Authorization": "Bearer a-secret-token-defined-in-test-cli"
          }
        },
        {
          "uuid": "627ea538-733d-4545-8d2d-03526eb381ce",
          "isEditable": true,
          "url": "https://example.com/authorization-less",
          "headers": {}
        },
        {
          "uuid": "771b0a28-ef28-4082-b984-536f82958c65",
          "isEditable": true,
          "url": "https://example.com/hook",
          "headers": {
            "authorization": "TOKEN"
          }
        },
        {
          "uuid": "f3583083-f8a7-4cbf-a5e7-fb3f1e28a7e9",
          "isEditable": true,
          "url": "https://third.com",
          "headers": {}
        }
      ]
    }
    "#);
}

#[actix_web::test]
async fn reserved_names() {
    let db_path = tempfile::tempdir().unwrap();
    let server = Server::new_with_options(Opt {
        task_webhook_url: Some(Url::parse("https://example-cli.com/").unwrap()),
        task_webhook_authorization_header: Some(String::from("Bearer a-secret-token")),
        ..default_settings(db_path.path())
    })
    .await
    .unwrap();

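    // The nil UUID identifies the CLI-defined webhook, which the API refuses
    // to modify or delete.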
    let (value, code) = server
        .patch_webhook(Uuid::nil().to_string(), json!({ "url": "http://localhost:8080" }))
        .await;
    snapshot!(value, @r#"
    {
      "message": "Webhook `[uuid]` is immutable. The webhook defined from the command line cannot be modified using the API.",
      "code": "immutable_webhook",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#immutable_webhook"
    }
    "#);
    snapshot!(code, @"400 Bad Request");

    let (value, code) = server.delete_webhook(Uuid::nil().to_string()).await;
    snapshot!(value, @r#"
    {
      "message": "Webhook `[uuid]` is immutable. The webhook defined from the command line cannot be modified using the API.",
      "code": "immutable_webhook",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#immutable_webhook"
    }
    "#);
    snapshot!(code, @"400 Bad Request");
}

#[actix_web::test]
async fn over_limits() {
    let server = Server::new().await;

    // Too many webhooks
    let mut uuids = Vec::new();
    for _ in 0..20 {
        let (value, code) = server.create_webhook(json!({ "url": "http://localhost:8080" })).await;
        snapshot!(code, @"201 Created");
        uuids.push(value.get("uuid").unwrap().as_str().unwrap().to_string());
    }
    let (value, code) = server.create_webhook(json!({ "url": "http://localhost:8080" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Defining too many webhooks would crush the server. Please limit the number of webhooks to 20. You may use a third-party proxy server to dispatch events to more than 20 endpoints.",
      "code": "invalid_webhooks",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhooks"
    }
    "#);

    // Reset webhooks
    for uuid in uuids {
        let (_value, code) = server.delete_webhook(&uuid).await;
        snapshot!(code, @"204 No Content");
    }

    // Test too many headers
    let (value, code) = server.create_webhook(json!({ "url": "http://localhost:8080" })).await;
    snapshot!(code, @"201 Created");
    let uuid = value.get("uuid").unwrap().as_str().unwrap();
    for i in 0..200 {
        let header_name = format!("header_{i}");
        let (_value, code) =
            server.patch_webhook(uuid, json!({ "headers": { header_name: "" } })).await;
        snapshot!(code, @"200 OK");
    }
    let (value, code) =
        server.patch_webhook(uuid, json!({ "headers": { "header_200": "" } })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Too many headers for the webhook `[uuid]`. Please limit the number of headers to 200. Hint: To remove an already defined header set its value to `null`",
      "code": "invalid_webhook_headers",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_headers"
    }
    "#);
}

#[actix_web::test]
async fn post_get_delete() {
    let server = Server::new().await;

    let (value, code) = server
        .create_webhook(json!({
            "url": "https://example.com/hook",
            "headers": { "authorization": "TOKEN" }
        }))
        .await;
    snapshot!(code, @"201 Created");
    snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
    {
      "uuid": "[uuid]",
      "isEditable": true,
      "url": "https://example.com/hook",
      "headers": {
        "authorization": "TOKEN"
      }
    }
    "#);

    let uuid = value.get("uuid").unwrap().as_str().unwrap();
    let (value, code) = server.get_webhook(uuid).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
    {
      "uuid": "[uuid]",
      "isEditable": true,
      "url": "https://example.com/hook",
      "headers": {
        "authorization": "TOKEN"
      }
    }
    "#);

    let (_value, code) = server.delete_webhook(uuid).await;
    snapshot!(code, @"204 No Content");

    let (_value, code) = server.get_webhook(uuid).await;
    snapshot!(code, @"404 Not Found");
}

#[actix_web::test]
async fn create_and_patch() {
    let server = Server::new().await;

    let (value, code) =
        server.create_webhook(json!({ "headers": { "authorization": "TOKEN" } })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "The URL for the webhook `[uuid]` is missing.",
      "code": "invalid_webhook_url",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_url"
    }
    "#);

    let (value, code) = server.create_webhook(json!({ "url": "https://example.com/hook" })).await;
    snapshot!(code, @"201 Created");
    snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
    {
      "uuid": "[uuid]",
      "isEditable": true,
      "url": "https://example.com/hook",
      "headers": {}
    }
    "#);

    let uuid = value.get("uuid").unwrap().as_str().unwrap();
    let (value, code) =
        server.patch_webhook(&uuid, json!({ "headers": { "authorization": "TOKEN" } })).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
    {
      "uuid": "[uuid]",
      "isEditable": true,
      "url": "https://example.com/hook",
      "headers": {
        "authorization": "TOKEN"
      }
    }
    "#);

    let (value, code) =
        server.patch_webhook(&uuid, json!({ "headers": { "authorization2": "TOKEN" } })).await;
    snapshot!(code, @"200 OK");
    snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
    {
      "uuid": "[uuid]",
      "isEditable": true,
      "url": "https://example.com/hook",
      "headers": {
        "authorization": "TOKEN",
        "authorization2": "TOKEN"
      }
    }
    "#);

let (value, code) =
|
||||
server.patch_webhook(&uuid, json!({ "headers": { "authorization": null } })).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"uuid": "[uuid]",
|
||||
"isEditable": true,
|
||||
"url": "https://example.com/hook",
|
||||
"headers": {
|
||||
"authorization2": "TOKEN"
|
||||
}
|
||||
}
|
||||
"#);
|
||||
|
||||
let (value, code) = server.patch_webhook(&uuid, json!({ "url": null })).await;
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
|
||||
{
|
||||
"message": "The URL for the webhook `[uuid]` is missing.",
|
||||
"code": "invalid_webhook_url",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_webhook_url"
|
||||
}
|
||||
"#);
|
||||
}
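
// Aside: the sequence above shows PATCH merges header maps key by key and only
// rejects a *missing* URL. A hedged sketch of a plain URL replacement, which
// should therefore be accepted (not asserted anywhere in this diff):
//
//     let (_value, code) =
//         server.patch_webhook(&uuid, json!({ "url": "https://example.com/hook2" })).await;
//     snapshot!(code, @"200 OK");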

#[actix_web::test]
async fn invalid_url_and_headers() {
    let server = Server::new().await;

    // Test invalid URL format
    let (value, code) = server.create_webhook(json!({ "url": "not-a-valid-url" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Invalid URL `not-a-valid-url`: relative URL without a base",
      "code": "invalid_webhook_url",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_url"
    }
    "#);

    // Test invalid header name (containing spaces)
    let (value, code) = server
        .create_webhook(json!({
            "url": "https://example.com/hook",
            "headers": { "invalid header name": "value" }
        }))
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Invalid header name `invalid header name`: invalid HTTP header name",
      "code": "invalid_webhook_headers",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_headers"
    }
    "#);

    // Test invalid header value (containing control characters)
    let (value, code) = server
        .create_webhook(json!({
            "url": "https://example.com/hook",
            "headers": { "authorization": "token\nwith\nnewlines" }
        }))
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Invalid header value `authorization`: failed to parse header value",
      "code": "invalid_webhook_headers",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_headers"
    }
    "#);
}
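
// Aside: the suffix "relative URL without a base" is the Display text of
// `url::ParseError::RelativeUrlWithoutBase`, which suggests (hedged: the
// validator itself is not in this diff) that URL checking defers to the
// `url` crate:
//
//     assert_eq!(
//         url::Url::parse("not-a-valid-url").unwrap_err().to_string(),
//         "relative URL without a base",
//     );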

#[actix_web::test]
async fn invalid_uuid() {
    let server = Server::new().await;

    // Test get webhook with invalid UUID
    let (value, code) = server.get_webhook("invalid-uuid").await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Invalid UUID: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1",
      "code": "invalid_webhook_uuid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_uuid"
    }
    "#);

    // Test update webhook with invalid UUID
    let (value, code) =
        server.patch_webhook("invalid-uuid", json!({ "url": "https://example.com/hook" })).await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Invalid UUID: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1",
      "code": "invalid_webhook_uuid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_uuid"
    }
    "#);

    // Test delete webhook with invalid UUID
    let (value, code) = server.delete_webhook("invalid-uuid").await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Invalid UUID: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1",
      "code": "invalid_webhook_uuid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_webhook_uuid"
    }
    "#);
}
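
// Aside: the same message appears for GET, PATCH, and DELETE, matching the
// `uuid` crate's parse error Display, so the path segment is presumably
// validated once with `Uuid::parse_str` (hedged: the handler code is not
// part of this diff):
//
//     assert!(uuid::Uuid::parse_str("invalid-uuid").is_err());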

#[actix_web::test]
async fn forbidden_fields() {
    let server = Server::new().await;

    // Test creating webhook with uuid field
    let custom_uuid = Uuid::new_v4();
    let (value, code) = server
        .create_webhook(json!({
            "url": "https://example.com/hook",
            "uuid": custom_uuid.to_string(),
            "headers": { "authorization": "TOKEN" }
        }))
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Immutable field `uuid`: expected one of `url`, `headers`",
      "code": "immutable_webhook_uuid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#immutable_webhook_uuid"
    }
    "#);

    // Test creating webhook with isEditable field
    let (value, code) = server
        .create_webhook(json!({
            "url": "https://example.com/hook2",
            "isEditable": false,
            "headers": { "authorization": "TOKEN" }
        }))
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Immutable field `isEditable`: expected one of `url`, `headers`",
      "code": "immutable_webhook_is_editable",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#immutable_webhook_is_editable"
    }
    "#);

    // Test patching webhook with uuid field
    let (value, code) = server
        .patch_webhook(
            "uuid-whatever",
            json!({
                "uuid": Uuid::new_v4(),
                "headers": { "new-header": "value" }
            }),
        )
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(value, @r#"
    {
      "message": "Immutable field `uuid`: expected one of `url`, `headers`",
      "code": "immutable_webhook_uuid",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#immutable_webhook_uuid"
    }
    "#);

    // Test patching webhook with isEditable field
    let (value, code) = server
        .patch_webhook(
            "uuid-whatever",
            json!({
                "isEditable": false,
                "headers": { "another-header": "value" }
            }),
        )
        .await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(value, { ".uuid" => "[uuid]" }), @r#"
    {
      "message": "Immutable field `isEditable`: expected one of `url`, `headers`",
      "code": "immutable_webhook_is_editable",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#immutable_webhook_is_editable"
    }
    "#);
}
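
// Aside: both PATCH cases above use the non-UUID path segment "uuid-whatever"
// yet report the immutable-field error rather than `invalid_webhook_uuid`,
// so body validation evidently runs before the path UUID is parsed.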

@@ -43,7 +43,7 @@ async fn version_too_old() {
     std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
     let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
     let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
-    snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.16.0");
+    snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.17.1");
 }

 #[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
     std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
     let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
     let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
-    snapshot!(err, @"Database version 1.16.1 is higher than the Meilisearch version 1.16.0. Downgrade is not supported");
+    snapshot!(err, @"Database version 1.17.2 is higher than the Meilisearch version 1.17.1. Downgrade is not supported");
 }

 #[actix_rt::test]

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.16.0"
+    "upgradeTo": "v1.17.1"
   },
   "stats": {
     "totalNbTasks": 1,
Some files were not shown because too many files have changed in this diff.