Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-02 10:45:36 +00:00)

Compare commits: chat-route ... embedder-s (170 commits)
Commits (SHA1):

00a0f74afa 71b5afa23b bff49cae38 17abe14bd9 170ad87e44
8f96724adf 01e5b0effa 2ec9664878 10028515ac 63ccd19ab1
1b4d344e18 89c0cf9b12 3770e70581 e497008161 a15ebb283f
3f256a7959 b41af0d0f6 3ebff65ef3 1d02efeab9 53fc98d3b0
61b0f50d4d 0557a4dd2f 930d5a09a8 8b0c4291ae c9efdf8c88
72736c0ea9 49317bbee4 af54c8381e 693fcd5752 733175359a
7c6162f0bf d6ae39bf0f e416bbc1de 2cfd363dc6 70aa78a2c2
96c81762ed 0b1f634afa d3d5015854 f95f29c492 a50b69b868
3668f5f021 54fdf379bb 41b1cd5a73 5c14a25d5a fda2843135
9347330f3a 56c9190dab 6b986dceaf ea6bb4df1d a3d2f64725
d5526cffff 5cb75d1f2a 921e3c4ffe 52591761af f80182f0a9
3b30b6a57a 5efc78db55 cffbe3fcb6 8d8fcb9846 20049669c9
db28d13cb1 5a7cfc57fd 790621dc29 1d577ae98b 88e9a55d44
dbe551cf99 a299fbd33b 193119acb9 4c71118699 5fe2943d3c
86ff502327 6b1a345dce b54ece690b 3ea167bade 1158d6689f
d9b0463a0b ae9899f179 308fd7128e 27e7c00622 58207da934
fb8b832192 17207b5405 bd95503eba 8b8b0d802c d329e86250
d416b3b390 54f5e74744 fd4b192a39 3c13feebf7 1811168b96
b06cc1e0a2 44f812c36d c8e77b5f25 283f516e15 b4ca0a8c98
b658e38acd f87e46cc16 65354b414a 025df397c0 f77abc9dc8
7e9909ee45 43ec97fe45 02929e241b c13efde042 36f0a1492c
ce65ad213b 3e0de6cb83 f3d691667d ce9c930d10 fc88b003b4
cf5d26124a 38b1c57fa8 25c525b057 83cd28b60b 48cad4132a
4897ad99d0 46ff78b4ec 9ad43b6841 c9ec502ed9 18aed75d3b
6738a4f6ee d2948adea3 f54b57e5be 95821d0bde f690fa0686
24e94b28c1 34d58f35c8 1d5265caf4 97aeb6db4d f888f87635
8c8d98eeaa c5ae43cac6 57eecd6197 2fe5c78cb6 8047cfe438
5717e5c1af bb07038c31 d1a088ea0b b68e22c0e6 03a36f116e
8a0bf24ed5 e2763471e5 b2f2c5d69f 1594c54e23 13b607bd68
3d130d31c8 4cda584b0c 248c90bad5 0e9040e605 3e3c00f44c
d986a3bbaf c2ceb8e41b 79db2e67fb 865f24cfef 3fbe1df770
150d1db86b 806e983aa5 e96c1d4b0f 15cdc6924b 677e8b122c
75a7e40a27 c8939944c6 4e6252fb03 8bd8e744f3 53f32a7dd7
47a7ed93d3 2ac826edca 89aff2081c 3b773b3416 648b2876f6
.github/workflows/db-change-missing.yml (vendored): 10 changed lines
@@ -4,22 +4,22 @@ on:
   pull_request:
     types: [opened, synchronize, reopened, labeled, unlabeled]
 
-env:
-  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
-
 jobs:
   check-labels:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Check db change labels
         id: check_labels
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
           echo ${{ github.event.pull_request.number }}
           echo $URL
-          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
+          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
           echo "Labels: $LABELS"
           if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
             echo "::error::Pull request must contain either the 'db change' or 'no db change' label."
             exit 1
Cargo.lock (generated): 2817 changed lines. File diff suppressed because it is too large.
@@ -31,7 +31,7 @@ anyhow = "1.0.95"
 bytes = "1.9.0"
 convert_case = "0.6.0"
 flate2 = "1.0.35"
-reqwest = { version = "0.12.15", features = ["blocking", "rustls-tls"], default-features = false }
+reqwest = { version = "0.12.12", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]
@@ -398,7 +398,6 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
             search_cutoff_ms: v6::Setting::NotSet,
             facet_search: v6::Setting::NotSet,
             prefix_search: v6::Setting::NotSet,
-            chat: v6::Setting::NotSet,
             _kind: std::marker::PhantomData,
         }
     }
@@ -28,7 +28,6 @@ mod lru;
 mod processing;
 mod queue;
 mod scheduler;
-mod settings;
 #[cfg(test)]
 mod test_utils;
 pub mod upgrade;
@@ -54,8 +53,8 @@ use flate2::Compression;
 use meilisearch_types::batches::Batch;
 use meilisearch_types::features::{InstanceTogglableFeatures, Network, RuntimeTogglableFeatures};
 use meilisearch_types::heed::byteorder::BE;
-use meilisearch_types::heed::types::{SerdeJson, Str, I128};
-use meilisearch_types::heed::{self, Database, Env, RoTxn, Unspecified, WithoutTls};
+use meilisearch_types::heed::types::I128;
+use meilisearch_types::heed::{self, Env, RoTxn, WithoutTls};
 use meilisearch_types::milli::index::IndexEmbeddingConfig;
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::vector::{Embedder, EmbedderOptions, EmbeddingConfigs};
@@ -143,8 +142,6 @@ pub struct IndexScheduler {
     /// The list of tasks currently processing
     pub(crate) processing_tasks: Arc<RwLock<ProcessingTasks>>,
 
-    /// The main database that also has the chat settings.
-    pub main: Database<Str, Unspecified>,
     /// A database containing only the version of the index-scheduler
     pub version: versioning::Versioning,
     /// The queue containing both the tasks and the batches.
@@ -199,7 +196,7 @@ impl IndexScheduler {
             version: self.version.clone(),
             queue: self.queue.private_clone(),
             scheduler: self.scheduler.private_clone(),
-            main: self.main.clone(),
 
             index_mapper: self.index_mapper.clone(),
             cleanup_enabled: self.cleanup_enabled,
             webhook_url: self.webhook_url.clone(),
@@ -270,7 +267,6 @@ impl IndexScheduler {
         let features = features::FeatureData::new(&env, &mut wtxn, options.instance_features)?;
         let queue = Queue::new(&env, &mut wtxn, &options)?;
         let index_mapper = IndexMapper::new(&env, &mut wtxn, &options, budget)?;
-        let chat_settings = env.create_database(&mut wtxn, Some("chat-settings"))?;
         wtxn.commit()?;
 
         // allow unreachable_code to get rids of the warning in the case of a test build.
@@ -294,7 +290,6 @@ impl IndexScheduler {
             #[cfg(test)]
             run_loop_iteration: Arc::new(RwLock::new(0)),
             features,
-            chat_settings,
         };
 
         this.run();
@@ -862,18 +857,6 @@ impl IndexScheduler {
             .collect();
         res.map(EmbeddingConfigs::new)
     }
-
-    pub fn chat_settings(&self) -> Result<Option<serde_json::Value>> {
-        let rtxn = self.env.read_txn().map_err(Error::HeedTransaction)?;
-        self.chat_settings.get(&rtxn, "main").map_err(Into::into)
-    }
-
-    pub fn put_chat_settings(&self, settings: &serde_json::Value) -> Result<()> {
-        let mut wtxn = self.env.write_txn().map_err(Error::HeedTransaction)?;
-        self.chat_settings.put(&mut wtxn, "main", settings)?;
-        wtxn.commit().map_err(Error::HeedTransaction)?;
-        Ok(())
-    }
 }
 
 /// The outcome of calling the [`IndexScheduler::tick`] function.
@@ -5,7 +5,7 @@ use meilisearch_types::milli::documents::PrimaryKey;
 use meilisearch_types::milli::progress::Progress;
 use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
 use meilisearch_types::milli::update::DocumentAdditionResult;
-use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder};
+use meilisearch_types::milli::{self, ChannelCongestion, Filter};
 use meilisearch_types::settings::apply_settings_to_builder;
 use meilisearch_types::tasks::{Details, KindWithContent, Status, Task};
 use meilisearch_types::Index;
@@ -113,18 +113,8 @@ impl IndexScheduler {
                 }
             }
 
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;
 
             progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges);
             let (document_changes, operation_stats, primary_key) = indexer
@@ -266,18 +256,8 @@ impl IndexScheduler {
 
         let mut congestion = None;
         if task.error.is_none() {
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;
 
             let candidates_count = candidates.len();
             progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges);
@@ -429,18 +409,8 @@ impl IndexScheduler {
 
         let mut congestion = None;
         if !tasks.iter().all(|res| res.error.is_some()) {
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;
 
             progress.update_progress(DocumentDeletionProgress::DeleteDocuments);
             let mut indexer = indexer::DocumentDeletion::new();
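The three hunks above all make the same simplification: the indexer config now always owns a thread pool, so the deferred `local_pool` binding disappears. For reference, a minimal standalone sketch of the removed idiom, with illustrative `Pool`/`Config` stand-ins rather than the Meilisearch types:

```rust
// A binding declared before the match ("deferred initialization") lets a
// locally built fallback live long enough to be borrowed from a match arm.
struct Pool;
struct Config {
    thread_pool: Option<Pool>,
}

fn use_pool(config: &Config) {
    let local_pool;
    let pool: &Pool = match &config.thread_pool {
        Some(pool) => pool,
        None => {
            local_pool = Pool; // initialized on this path only
            &local_pool
        }
    };
    let _ = pool;
    // After the change, the config always owns a pool, so this collapses to:
    // let pool = &config.thread_pool;
}

fn main() {
    use_pool(&Config { thread_pool: None });
    use_pool(&Config { thread_pool: Some(Pool) });
}
```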
@@ -1,432 +0,0 @@
use std::collections::{BTreeMap, BTreeSet};
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use std::ops::{ControlFlow, Deref};
use std::str::FromStr;

use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::disabled_typos_terms::DisabledTyposTerms;
use milli::index::{IndexEmbeddingConfig, PrefixSearch};
use milli::proximity::ProximityPrecision;
use milli::update::Setting;
use milli::{FilterableAttributesRule, Index};
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;

use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::heed::RoTxn;
use crate::IndexScheduler;

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
pub struct PromptsSettings {
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]
    pub system: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
    #[schema(value_type = Option<MinWordSizeTyposSetting>, example = json!({ "oneTypo": 5, "twoTypo": 9 }))]
    pub search_description: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]
    pub search_q_param: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]
    pub pre_query: Setting<String>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum ChatSource {
    #[default]
    OpenAi,
}

/// Holds all the settings for an index. `T` can either be `Checked` if they represent settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the latter case,
/// a call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(
    deny_unknown_fields,
    rename_all = "camelCase",
    bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct ChatSettings<T> {
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
    #[schema(value_type = Option<String>)]
    pub source: Setting<ChatSource>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
    #[schema(value_type = Option<String>)]
    pub base_api: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
    #[schema(value_type = Option<String>)]
    pub api_key: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
    #[schema(value_type = Option<PromptsSettings>)]
    pub prompts: Setting<PromptsSettings>,

    #[serde(skip)]
    #[deserr(skip)]
    pub _kind: PhantomData<T>,
}

impl<T> ChatSettings<T> {
    pub fn hide_secrets(&mut self) {
        match &mut self.api_key {
            Setting::Set(key) => Self::hide_secret(key),
            Setting::Reset => todo!(),
            Setting::NotSet => todo!(),
        }
    }

    fn hide_secret(secret: &mut String) {
        match secret.len() {
            x if x < 10 => {
                secret.replace_range(.., "XXX...");
            }
            x if x < 20 => {
                secret.replace_range(2.., "XXXX...");
            }
            x if x < 30 => {
                secret.replace_range(3.., "XXXXX...");
            }
            _x => {
                secret.replace_range(5.., "XXXXXX...");
            }
        }
    }
}

impl ChatSettings<Checked> {
    pub fn cleared() -> ChatSettings<Checked> {
        ChatSettings {
            source: Setting::Reset,
            base_api: Setting::Reset,
            api_key: Setting::Reset,
            prompts: Setting::Reset,
            _kind: PhantomData,
        }
    }

    pub fn into_unchecked(self) -> ChatSettings<Unchecked> {
        let Self { source, base_api, api_key, prompts, _kind } = self;
        ChatSettings { source, base_api, api_key, prompts, _kind: PhantomData }
    }
}

impl ChatSettings<Unchecked> {
    pub fn check(self) -> ChatSettings<Checked> {
        ChatSettings {
            source: self.source,
            base_api: self.base_api,
            api_key: self.api_key,
            prompts: self.prompts,
            _kind: PhantomData,
        }
    }

    pub fn validate(self) -> Result<Self, milli::Error> {
        self.validate_prompt_settings()?;
        self.validate_global_settings()
    }

    fn validate_global_settings(mut self) -> Result<Self, milli::Error> {
        // Check that the ApiBase is a valid URL
        Ok(self)
    }

    fn validate_prompt_settings(mut self) -> Result<Self, milli::Error> {
        // TODO
        // let Setting::Set(mut configs) = self.embedders else { return Ok(self) };
        // for (name, config) in configs.iter_mut() {
        //     let config_to_check = std::mem::take(config);
        //     let checked_config =
        //         milli::update::validate_embedding_settings(config_to_check.inner, name)?;
        //     *config = SettingEmbeddingSettings { inner: checked_config };
        // }
        // self.embedders = Setting::Set(configs);
        Ok(self)
    }

    pub fn merge(&mut self, other: &Self) {
        // For most settings only the latest version is kept
        *self = Self {
            source: other.source.or(self.source),
            base_api: other.base_api.or(self.base_api),
            api_key: other.api_key.or(self.api_key),
            prompts: match (self.prompts, other.prompts) {
                (Setting::NotSet, set) | (set, Setting::NotSet) => set,
                (Setting::Set(_) | Setting::Reset, Setting::Reset) => Setting::Reset,
                (Setting::Reset, Setting::Set(set)) => Setting::Set(set),
                // If both are set we must merge the prompts settings
                (Setting::Set(this), Setting::Set(other)) => Setting::Set(PromptsSettings {
                    system: other.system.or(this.system),
                    search_description: other.search_description.or(this.search_description),
                    search_q_param: other.search_q_param.or(this.search_q_param),
                    pre_query: other.pre_query.or(this.pre_query),
                }),
            },

            _kind: PhantomData,
        }
    }
}

pub fn apply_settings_to_builder(
    settings: &ChatSettings<Checked>,
    // TODO we must not store this into milli but in the index scheduler
    builder: &mut milli::update::Settings,
) {
    let ChatSettings { source, base_api, api_key, prompts, _kind } = settings;

    match source.deref() {
        Setting::Set(ref names) => builder.set_searchable_fields(names.clone()),
        Setting::Reset => builder.reset_searchable_fields(),
        Setting::NotSet => (),
    }

    match displayed_attributes.deref() {
        Setting::Set(ref names) => builder.set_displayed_fields(names.clone()),
        Setting::Reset => builder.reset_displayed_fields(),
        Setting::NotSet => (),
    }

    match filterable_attributes {
        Setting::Set(ref facets) => {
            builder.set_filterable_fields(facets.clone().into_iter().collect())
        }
        Setting::Reset => builder.reset_filterable_fields(),
        Setting::NotSet => (),
    }

    match sortable_attributes {
        Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
        Setting::Reset => builder.reset_sortable_fields(),
        Setting::NotSet => (),
    }

    match ranking_rules {
        Setting::Set(ref criteria) => {
            builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
        }
        Setting::Reset => builder.reset_criteria(),
        Setting::NotSet => (),
    }

    match stop_words {
        Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()),
        Setting::Reset => builder.reset_stop_words(),
        Setting::NotSet => (),
    }

    match non_separator_tokens {
        Setting::Set(ref non_separator_tokens) => {
            builder.set_non_separator_tokens(non_separator_tokens.clone())
        }
        Setting::Reset => builder.reset_non_separator_tokens(),
        Setting::NotSet => (),
    }

    match separator_tokens {
        Setting::Set(ref separator_tokens) => {
            builder.set_separator_tokens(separator_tokens.clone())
        }
        Setting::Reset => builder.reset_separator_tokens(),
        Setting::NotSet => (),
    }

    match dictionary {
        Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
        Setting::Reset => builder.reset_dictionary(),
        Setting::NotSet => (),
    }

    match synonyms {
        Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
        Setting::Reset => builder.reset_synonyms(),
        Setting::NotSet => (),
    }

    match distinct_attribute {
        Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()),
        Setting::Reset => builder.reset_distinct_field(),
        Setting::NotSet => (),
    }

    match proximity_precision {
        Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
        Setting::Reset => builder.reset_proximity_precision(),
        Setting::NotSet => (),
    }

    match localized_attributes_rules {
        Setting::Set(ref rules) => builder
            .set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
        Setting::Reset => builder.reset_localized_attributes_rules(),
        Setting::NotSet => (),
    }

    match typo_tolerance {
        Setting::Set(ref value) => {
            match value.enabled {
                Setting::Set(val) => builder.set_autorize_typos(val),
                Setting::Reset => builder.reset_authorize_typos(),
                Setting::NotSet => (),
            }

            match value.min_word_size_for_typos {
                Setting::Set(ref setting) => {
                    match setting.one_typo {
                        Setting::Set(val) => builder.set_min_word_len_one_typo(val),
                        Setting::Reset => builder.reset_min_word_len_one_typo(),
                        Setting::NotSet => (),
                    }
                    match setting.two_typos {
                        Setting::Set(val) => builder.set_min_word_len_two_typos(val),
                        Setting::Reset => builder.reset_min_word_len_two_typos(),
                        Setting::NotSet => (),
                    }
                }
                Setting::Reset => {
                    builder.reset_min_word_len_one_typo();
                    builder.reset_min_word_len_two_typos();
                }
                Setting::NotSet => (),
            }

            match value.disable_on_words {
                Setting::Set(ref words) => {
                    builder.set_exact_words(words.clone());
                }
                Setting::Reset => builder.reset_exact_words(),
                Setting::NotSet => (),
            }

            match value.disable_on_attributes {
                Setting::Set(ref words) => {
                    builder.set_exact_attributes(words.iter().cloned().collect())
                }
                Setting::Reset => builder.reset_exact_attributes(),
                Setting::NotSet => (),
            }

            match value.disable_on_numbers {
                Setting::Set(val) => builder.set_disable_on_numbers(val),
                Setting::Reset => builder.reset_disable_on_numbers(),
                Setting::NotSet => (),
            }
        }
        Setting::Reset => {
            // all typo settings need to be reset here.
            builder.reset_authorize_typos();
            builder.reset_min_word_len_one_typo();
            builder.reset_min_word_len_two_typos();
            builder.reset_exact_words();
            builder.reset_exact_attributes();
        }
        Setting::NotSet => (),
    }

    match faceting {
        Setting::Set(FacetingSettings { max_values_per_facet, sort_facet_values_by }) => {
            match max_values_per_facet {
                Setting::Set(val) => builder.set_max_values_per_facet(*val),
                Setting::Reset => builder.reset_max_values_per_facet(),
                Setting::NotSet => (),
            }
            match sort_facet_values_by {
                Setting::Set(val) => builder.set_sort_facet_values_by(
                    val.iter().map(|(name, order)| (name.clone(), (*order).into())).collect(),
                ),
                Setting::Reset => builder.reset_sort_facet_values_by(),
                Setting::NotSet => (),
            }
        }
        Setting::Reset => {
            builder.reset_max_values_per_facet();
            builder.reset_sort_facet_values_by();
        }
        Setting::NotSet => (),
    }

    match pagination {
        Setting::Set(ref value) => match value.max_total_hits {
            Setting::Set(val) => builder.set_pagination_max_total_hits(val),
            Setting::Reset => builder.reset_pagination_max_total_hits(),
            Setting::NotSet => (),
        },
        Setting::Reset => builder.reset_pagination_max_total_hits(),
        Setting::NotSet => (),
    }

    match embedders {
        Setting::Set(value) => builder.set_embedder_settings(
            value.iter().map(|(k, v)| (k.clone(), v.inner.clone())).collect(),
        ),
        Setting::Reset => builder.reset_embedder_settings(),
        Setting::NotSet => (),
    }

    match search_cutoff_ms {
        Setting::Set(cutoff) => builder.set_search_cutoff(*cutoff),
        Setting::Reset => builder.reset_search_cutoff(),
        Setting::NotSet => (),
    }

    match prefix_search {
        Setting::Set(prefix_search) => {
            builder.set_prefix_search(PrefixSearch::from(*prefix_search))
        }
        Setting::Reset => builder.reset_prefix_search(),
        Setting::NotSet => (),
    }

    match facet_search {
        Setting::Set(facet_search) => builder.set_facet_search(*facet_search),
        Setting::Reset => builder.reset_facet_search(),
        Setting::NotSet => (),
    }

    match chat {
        Setting::Set(chat) => builder.set_chat(chat.clone()),
        Setting::Reset => builder.reset_chat(),
        Setting::NotSet => (),
    }
}

pub enum SecretPolicy {
    RevealSecrets,
    HideSecrets,
}

pub fn settings(
    index_scheduler: &IndexScheduler,
    rtxn: &RoTxn,
    secret_policy: SecretPolicy,
) -> Result<Settings<Checked>, milli::Error> {
    let mut settings = index_scheduler.chat_settings(rtxn)?;
    if let SecretPolicy::HideSecrets = secret_policy {
        settings.hide_secrets()
    }
    Ok(settings)
}
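The tiered masking in `hide_secret` above keeps a longer plaintext prefix as the secret grows. A runnable sketch with the thresholds and replacement strings copied from the deleted code, and the surrounding settings types stripped:

```rust
// Secrets shorter than 10 chars are hidden entirely; longer ones keep a
// 2-, 3-, or 5-character prefix depending on length.
fn hide_secret(secret: &mut String) {
    match secret.len() {
        x if x < 10 => secret.replace_range(.., "XXX..."),
        x if x < 20 => secret.replace_range(2.., "XXXX..."),
        x if x < 30 => secret.replace_range(3.., "XXXXX..."),
        _ => secret.replace_range(5.., "XXXXXX..."),
    }
}

fn main() {
    let mut short = String::from("abc");
    let mut long = String::from("sk-1234567890abcdefghijklmnopqrstuv");
    hide_secret(&mut short);
    hide_secret(&mut long);
    assert_eq!(short, "XXX...");        // short keys are hidden entirely
    assert_eq!(long, "sk-12XXXXXX..."); // long keys keep a 5-char prefix
}
```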
@@ -1,3 +0,0 @@
mod chat;

pub use chat::ChatSettings;
@@ -15,3 +15,5 @@ license.workspace = true
 insta = { version = "=1.39.0", features = ["json", "redactions"] }
 md5 = "0.7.0"
 once_cell = "1.20"
+regex-lite = "0.1.6"
+uuid = { version = "1.17.0", features = ["v4"] }
@@ -4,9 +4,16 @@ use std::path::{Path, PathBuf};
 use std::sync::Mutex;
 
 pub use insta;
+use insta::internals::{Content, ContentPath};
 use once_cell::sync::Lazy;
+use regex_lite::Regex;
 
 static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default);
+/// A regex to match UUIDs in messages, specifically looking for the UUID v4 format
+static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| {
+    Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}")
+        .unwrap()
+});
 
 /// Return the md5 hash of the given string
 pub fn hash_snapshot(snap: &str) -> String {
@@ -26,6 +33,34 @@ pub fn default_snapshot_settings_for_test<'a>(
     let filename = path.file_name().unwrap().to_str().unwrap();
     settings.set_omit_expression(true);
 
+    fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content {
+        match &content {
+            Content::String(s) => {
+                let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]");
+                Content::String(uuid_replaced.to_string())
+            }
+            _ => content,
+        }
+    }
+
+    settings.add_dynamic_redaction(".message", uuid_in_message_redaction);
+    settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction);
+    settings.add_dynamic_redaction(".indexUid", |content, _content_path| match &content {
+        Content::String(s) => match uuid::Uuid::parse_str(s) {
+            Ok(_) => Content::String("[uuid]".to_owned()),
+            Err(_) => content,
+        },
+        _ => content,
+    });
+
+    settings.add_dynamic_redaction(".error.message", |content, _content_path| match &content {
+        Content::String(s) => {
+            let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "$before[uuid]$after");
+            Content::String(uuid_replaced.to_string())
+        }
+        _ => content,
+    });
+
     let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name);
     let test_name = test_name.rsplit("::").next().unwrap().to_owned();
@@ -232,6 +267,9 @@ macro_rules! json_string {
 #[cfg(test)]
 mod tests {
     use crate as meili_snap;
+    use crate::UUID_IN_MESSAGE_RE;
+    use uuid::Uuid;
 
     #[test]
     fn snap() {
         snapshot_hash!(10, @"d3d9446802a44259755d38e6d163e820");
@@ -279,4 +317,14 @@ mod tests {
         // snapshot_hash!("", name: "", @"d41d8cd98f00b204e9800998ecf8427e");
     }
+
+    #[test]
+    fn uuid_in_message_regex() {
+        let uuid1 = Uuid::new_v4();
+        let uuid2 = Uuid::new_v4();
+        let uuid3 = Uuid::new_v4();
+        let to_replace = format!("1 {uuid1} 2 {uuid2} 3 {uuid3} 4");
+        let replaced = UUID_IN_MESSAGE_RE.replace_all(to_replace.as_str(), "[uuid]");
+        assert_eq!(replaced, "1 [uuid] 2 [uuid] 3 [uuid] 4");
+    }
 }
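As a quick standalone check of the exact UUID-v4 pattern this redaction relies on, using `regex_lite`; the message here is made up for illustration:

```rust
use regex_lite::Regex;

fn main() {
    let re = Regex::new(
        r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}",
    )
    .unwrap();
    let msg = "index `29e8a9d2-9e4a-4f39-93a1-2b9d40f3a2de` not found";
    assert_eq!(re.replace_all(msg, "[uuid]"), "index `[uuid]` not found");
}
```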
@@ -351,7 +351,6 @@ pub struct IndexSearchRules {
 fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
     store.put_api_key(Key::default_admin())?;
     store.put_api_key(Key::default_search())?;
-    store.put_api_key(Key::default_chat())?;
 
     Ok(())
 }
@@ -387,8 +387,7 @@ VectorEmbeddingError , InvalidRequest , BAD_REQUEST ;
 NotFoundSimilarId , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentEditionContext , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentEditionFunctionFilter , InvalidRequest , BAD_REQUEST ;
-EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST ;
-InvalidSettingsIndexChat , InvalidRequest , BAD_REQUEST
+EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST
 }
 
 impl ErrorCode for JoinError {
@@ -158,21 +158,6 @@ impl Key {
             updated_at: now,
         }
     }
-
-    pub fn default_chat() -> Self {
-        let now = OffsetDateTime::now_utc();
-        let uid = Uuid::new_v4();
-        Self {
-            name: Some("Default Chat API Key".to_string()),
-            description: Some("Use it to chat and search from the frontend".to_string()),
-            uid,
-            actions: vec![Action::Chat, Action::Search],
-            indexes: vec![IndexUidPattern::all()],
-            expires_at: None,
-            created_at: now,
-            updated_at: now,
-        }
-    }
 }
 
 fn parse_expiration_date(
@@ -323,18 +308,6 @@ pub enum Action {
     #[serde(rename = "network.update")]
     #[deserr(rename = "network.update")]
     NetworkUpdate,
-    #[serde(rename = "chat.get")]
-    #[deserr(rename = "chat.get")]
-    Chat,
-    #[serde(rename = "chatSettings.*")]
-    #[deserr(rename = "chatSettings.*")]
-    ChatSettingsAll,
-    #[serde(rename = "chatSettings.get")]
-    #[deserr(rename = "chatSettings.get")]
-    ChatSettingsGet,
-    #[serde(rename = "chatSettings.update")]
-    #[deserr(rename = "chatSettings.update")]
-    ChatSettingsUpdate,
 }
 
 impl Action {
@@ -360,9 +333,6 @@ impl Action {
             SETTINGS_ALL => Some(Self::SettingsAll),
             SETTINGS_GET => Some(Self::SettingsGet),
             SETTINGS_UPDATE => Some(Self::SettingsUpdate),
-            CHAT_SETTINGS_ALL => Some(Self::ChatSettingsAll),
-            CHAT_SETTINGS_GET => Some(Self::ChatSettingsGet),
-            CHAT_SETTINGS_UPDATE => Some(Self::ChatSettingsUpdate),
             STATS_ALL => Some(Self::StatsAll),
             STATS_GET => Some(Self::StatsGet),
             METRICS_ALL => Some(Self::MetricsAll),
@@ -379,7 +349,6 @@ impl Action {
             EXPERIMENTAL_FEATURES_UPDATE => Some(Self::ExperimentalFeaturesUpdate),
             NETWORK_GET => Some(Self::NetworkGet),
             NETWORK_UPDATE => Some(Self::NetworkUpdate),
-            CHAT => Some(Self::Chat),
             _otherwise => None,
         }
     }
@@ -428,9 +397,4 @@ pub mod actions {
 
     pub const NETWORK_GET: u8 = NetworkGet.repr();
     pub const NETWORK_UPDATE: u8 = NetworkUpdate.repr();
-
-    pub const CHAT: u8 = Chat.repr();
-    pub const CHAT_SETTINGS_ALL: u8 = ChatSettingsAll.repr();
-    pub const CHAT_SETTINGS_GET: u8 = ChatSettingsGet.repr();
-    pub const CHAT_SETTINGS_UPDATE: u8 = ChatSettingsUpdate.repr();
 }
@@ -11,13 +11,11 @@ use fst::IntoStreamer;
 use milli::disabled_typos_terms::DisabledTyposTerms;
 use milli::index::{IndexEmbeddingConfig, PrefixSearch};
 use milli::proximity::ProximityPrecision;
-pub use milli::update::ChatSettings;
 use milli::update::Setting;
 use milli::{Criterion, CriterionError, FilterableAttributesRule, Index, DEFAULT_VALUES_PER_FACET};
 use serde::{Deserialize, Serialize, Serializer};
 use utoipa::ToSchema;
 
 use super::{Checked, Unchecked};
 use crate::deserr::DeserrJsonError;
 use crate::error::deserr_codes::*;
 use crate::facet_values_sort::FacetValuesSort;
@@ -201,86 +199,72 @@ pub struct Settings<T> {
     #[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
     #[schema(value_type = Option<Vec<String>>, example = json!(["id", "title", "description", "url"]))]
     pub displayed_attributes: WildcardSetting,
 
     /// Fields in which to search for matching query words sorted by order of importance.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
     #[schema(value_type = Option<Vec<String>>, example = json!(["title", "description"]))]
     pub searchable_attributes: WildcardSetting,
 
     /// Attributes to use for faceting and filtering. See [Filtering and Faceted Search](https://www.meilisearch.com/docs/learn/filtering_and_sorting/search_with_facet_filters).
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
     #[schema(value_type = Option<Vec<FilterableAttributesRule>>, example = json!(["release_date", "genre"]))]
     pub filterable_attributes: Setting<Vec<FilterableAttributesRule>>,
 
     /// Attributes to use when sorting search results.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSortableAttributes>)]
     #[schema(value_type = Option<Vec<String>>, example = json!(["release_date"]))]
     pub sortable_attributes: Setting<BTreeSet<String>>,
 
     /// List of ranking rules sorted by order of importance. The order is customizable.
     /// [A list of ordered built-in ranking rules](https://www.meilisearch.com/docs/learn/relevancy/relevancy).
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsRankingRules>)]
     #[schema(value_type = Option<Vec<String>>, example = json!([RankingRuleView::Words, RankingRuleView::Typo, RankingRuleView::Proximity, RankingRuleView::Attribute, RankingRuleView::Exactness]))]
     pub ranking_rules: Setting<Vec<RankingRuleView>>,
 
     /// List of words ignored when present in search queries.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
     #[schema(value_type = Option<Vec<String>>, example = json!(["the", "a", "them", "their"]))]
     pub stop_words: Setting<BTreeSet<String>>,
 
     /// List of characters not delimiting where one term begins and ends.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
     #[schema(value_type = Option<Vec<String>>, example = json!([" ", "\n"]))]
     pub non_separator_tokens: Setting<BTreeSet<String>>,
 
     /// List of characters delimiting where one term begins and ends.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
     #[schema(value_type = Option<Vec<String>>, example = json!(["S"]))]
     pub separator_tokens: Setting<BTreeSet<String>>,
 
     /// List of strings Meilisearch should parse as a single term.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
     #[schema(value_type = Option<Vec<String>>, example = json!(["iPhone pro"]))]
     pub dictionary: Setting<BTreeSet<String>>,
 
     /// List of associated words treated similarly. A word associated to an array of word as synonyms.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
     #[schema(value_type = Option<BTreeMap<String, Vec<String>>>, example = json!({ "he": ["she", "they", "them"], "phone": ["iPhone", "android"]}))]
     pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
 
     /// Search returns documents with distinct (different) values of the given field.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsDistinctAttribute>)]
     #[schema(value_type = Option<String>, example = json!("sku"))]
     pub distinct_attribute: Setting<String>,
 
     /// Precision level when calculating the proximity ranking rule.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsProximityPrecision>)]
     #[schema(value_type = Option<String>, example = json!(ProximityPrecisionView::ByAttribute))]
     pub proximity_precision: Setting<ProximityPrecisionView>,
 
     /// Customize typo tolerance feature.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
     #[schema(value_type = Option<TypoSettings>, example = json!({ "enabled": true, "disableOnAttributes": ["title"]}))]
     pub typo_tolerance: Setting<TypoSettings>,
 
     /// Faceting settings.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsFaceting>)]
     #[schema(value_type = Option<FacetingSettings>, example = json!({ "maxValuesPerFacet": 10, "sortFacetValuesBy": { "genre": FacetValuesSort::Count }}))]
     pub faceting: Setting<FacetingSettings>,
 
     /// Pagination settings.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsPagination>)]
@@ -292,34 +276,24 @@ pub struct Settings<T> {
     #[deserr(default, error = DeserrJsonError<InvalidSettingsEmbedders>)]
     #[schema(value_type = Option<BTreeMap<String, SettingEmbeddingSettings>>)]
     pub embedders: Setting<BTreeMap<String, SettingEmbeddingSettings>>,
 
     /// Maximum duration of a search query.
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsSearchCutoffMs>)]
     #[schema(value_type = Option<u64>, example = json!(50))]
     pub search_cutoff_ms: Setting<u64>,
 
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsLocalizedAttributes>)]
     #[schema(value_type = Option<Vec<LocalizedAttributesRuleView>>, example = json!(50))]
     pub localized_attributes: Setting<Vec<LocalizedAttributesRuleView>>,
 
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsFacetSearch>)]
     #[schema(value_type = Option<bool>, example = json!(true))]
     pub facet_search: Setting<bool>,
 
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default, error = DeserrJsonError<InvalidSettingsPrefixSearch>)]
     #[schema(value_type = Option<PrefixSearchSettings>, example = json!("Hemlo"))]
     pub prefix_search: Setting<PrefixSearchSettings>,
 
-    /// Customize the chat prompting.
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default, error = DeserrJsonError<InvalidSettingsIndexChat>)]
-    #[schema(value_type = Option<ChatSettings>)]
-    pub chat: Setting<ChatSettings>,
-
     #[serde(skip)]
     #[deserr(skip)]
     pub _kind: PhantomData<T>,
@@ -385,7 +359,6 @@ impl Settings<Checked> {
             localized_attributes: Setting::Reset,
             facet_search: Setting::Reset,
             prefix_search: Setting::Reset,
-            chat: Setting::Reset,
             _kind: PhantomData,
         }
     }
@@ -412,7 +385,6 @@ impl Settings<Checked> {
             localized_attributes: localized_attributes_rules,
             facet_search,
             prefix_search,
-            chat,
             _kind,
         } = self;
@@ -437,7 +409,6 @@ impl Settings<Checked> {
             localized_attributes: localized_attributes_rules,
             facet_search,
             prefix_search,
-            chat,
             _kind: PhantomData,
         }
     }
@@ -488,7 +459,6 @@ impl Settings<Unchecked> {
             localized_attributes: self.localized_attributes,
             facet_search: self.facet_search,
             prefix_search: self.prefix_search,
-            chat: self.chat,
             _kind: PhantomData,
         }
     }
@@ -563,9 +533,8 @@ impl Settings<Unchecked> {
                     Setting::Set(this)
                 }
             },
-            facet_search: other.facet_search.or(self.facet_search),
             prefix_search: other.prefix_search.or(self.prefix_search),
-            chat: other.chat.clone().or(self.chat.clone()),
+            facet_search: other.facet_search.or(self.facet_search),
             _kind: PhantomData,
         }
     }
@@ -604,7 +573,6 @@ pub fn apply_settings_to_builder(
         localized_attributes: localized_attributes_rules,
         facet_search,
         prefix_search,
-        chat,
         _kind,
     } = settings;
@@ -698,7 +666,7 @@ pub fn apply_settings_to_builder(
     match typo_tolerance {
         Setting::Set(ref value) => {
             match value.enabled {
-                Setting::Set(val) => builder.set_autorize_typos(val),
+                Setting::Set(val) => builder.set_authorize_typos(val),
                 Setting::Reset => builder.reset_authorize_typos(),
                 Setting::NotSet => (),
             }
@@ -815,12 +783,6 @@ pub fn apply_settings_to_builder(
         Setting::Reset => builder.reset_facet_search(),
         Setting::NotSet => (),
     }
-
-    match chat {
-        Setting::Set(chat) => builder.set_chat(chat.clone()),
-        Setting::Reset => builder.reset_chat(),
-        Setting::NotSet => (),
-    }
 }
 
 pub enum SecretPolicy {
@@ -918,11 +880,14 @@ pub fn settings(
         })
         .collect();
     let embedders = Setting::Set(embedders);
 
     let search_cutoff_ms = index.search_cutoff(rtxn)?;
 
     let localized_attributes_rules = index.localized_attributes_rules(rtxn)?;
 
     let prefix_search = index.prefix_search(rtxn)?.map(PrefixSearchSettings::from);
 
     let facet_search = index.facet_search(rtxn)?;
-    let chat = index.chat_config(rtxn).map(ChatSettings::from)?;
 
     let mut settings = Settings {
         displayed_attributes: match displayed_attributes {
@@ -960,9 +925,8 @@ pub fn settings(
             Some(rules) => Setting::Set(rules.into_iter().map(|r| r.into()).collect()),
             None => Setting::Reset,
         },
-        facet_search: Setting::Set(facet_search),
         prefix_search: Setting::Set(prefix_search.unwrap_or_default()),
-        chat: Setting::Set(chat),
+        facet_search: Setting::Set(facet_search),
         _kind: PhantomData,
     };
@@ -1190,7 +1154,6 @@ pub(crate) mod test {
            search_cutoff_ms: Setting::NotSet,
            facet_search: Setting::NotSet,
            prefix_search: Setting::NotSet,
-           chat: Setting::NotSet,
            _kind: PhantomData::<Unchecked>,
        };
@@ -1222,8 +1185,6 @@ pub(crate) mod test {
            search_cutoff_ms: Setting::NotSet,
            facet_search: Setting::NotSet,
            prefix_search: Setting::NotSet,
-           chat: Setting::NotSet,
-
            _kind: PhantomData::<Unchecked>,
        };
@@ -32,7 +32,6 @@ async-trait = "0.1.85"
 bstr = "1.11.3"
 byte-unit = { version = "5.1.6", features = ["serde"] }
 bytes = "1.9.0"
-bumpalo = "3.16.0"
 clap = { version = "4.5.24", features = ["derive", "env"] }
 crossbeam-channel = "0.5.15"
 deserr = { version = "0.6.3", features = ["actix-web"] }
@@ -49,7 +48,6 @@ is-terminal = "0.4.13"
 itertools = "0.14.0"
 jsonwebtoken = "9.3.0"
 lazy_static = "1.5.0"
-liquid = "0.26.9"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 mimalloc = { version = "0.1.43", default-features = false }
@@ -113,14 +111,12 @@ utoipa = { version = "5.3.1", features = [
     "openapi_extensions",
 ] }
 utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
-async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "optional-type-function" }
-actix-web-lab = { version = "0.24.1", default-features = false }
 
 [dev-dependencies]
 actix-rt = "2.10.0"
 brotli = "6.0.0"
 # fixed version due to format breakages in v1.40
-insta = "=1.39.0"
+insta = { version = "=1.39.0", features = ["redactions"] }
 manifest-dir-macros = "0.1.18"
 maplit = "1.0.2"
 meili-snap = { path = "../meili-snap" }
@@ -4,7 +4,6 @@ use std::marker::PhantomData;
 use std::ops::Deref;
 use std::pin::Pin;
 
-use actix_web::http::header::AUTHORIZATION;
 use actix_web::web::Data;
 use actix_web::FromRequest;
 pub use error::AuthenticationError;
@@ -95,44 +94,36 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
         _payload: &mut actix_web::dev::Payload,
     ) -> Self::Future {
         match req.app_data::<Data<AuthController>>().cloned() {
-            Some(auth) => match extract_token_from_request(req) {
-                Ok(Some(token)) => {
-                    // TODO: find a less hardcoded way?
-                    let index = req.match_info().get("index_uid");
-                    Box::pin(Self::auth_bearer(
-                        auth,
-                        token.to_string(),
-                        index.map(String::from),
-                        req.app_data::<D>().cloned(),
-                    ))
-                }
-                Ok(None) => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
-                Err(e) => Box::pin(err(e.into())),
+            Some(auth) => match req
+                .headers()
+                .get("Authorization")
+                .map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
+            {
+                Some(mut type_token) => match type_token.next() {
+                    Some("Bearer") => {
+                        // TODO: find a less hardcoded way?
+                        let index = req.match_info().get("index_uid");
+                        match type_token.next() {
+                            Some(token) => Box::pin(Self::auth_bearer(
+                                auth,
+                                token.to_string(),
+                                index.map(String::from),
+                                req.app_data::<D>().cloned(),
+                            )),
+                            None => Box::pin(err(AuthenticationError::InvalidToken.into())),
+                        }
+                    }
+                    _otherwise => {
+                        Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
+                    }
+                },
+                None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
             },
             None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
         }
     }
 }
 
-pub fn extract_token_from_request(
-    req: &actix_web::HttpRequest,
-) -> Result<Option<&str>, AuthenticationError> {
-    match req
-        .headers()
-        .get(AUTHORIZATION)
-        .map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
-    {
-        Some(mut type_token) => match type_token.next() {
-            Some("Bearer") => match type_token.next() {
-                Some(token) => Ok(Some(token)),
-                None => Err(AuthenticationError::InvalidToken),
-            },
-            _otherwise => Err(AuthenticationError::MissingAuthorizationHeader),
-        },
-        None => Ok(None),
-    }
-}
-
 pub trait Policy {
     fn authenticate(
         auth: Data<AuthController>,
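The hunk above inlines the `splitn`-based Bearer parsing that the removed `extract_token_from_request` helper previously owned. A simplified, self-contained sketch of that parsing, with the error type reduced to static strings for illustration:

```rust
// Split the header on the first space, expect the "Bearer" scheme, and
// treat a missing token as an error; no header at all means keyless auth.
fn extract_bearer(header: Option<&str>) -> Result<Option<&str>, &'static str> {
    match header.map(|h| h.splitn(2, ' ')) {
        Some(mut parts) => match parts.next() {
            Some("Bearer") => match parts.next() {
                Some(token) => Ok(Some(token)),
                None => Err("invalid token"),
            },
            _ => Err("missing authorization header"),
        },
        None => Ok(None),
    }
}

fn main() {
    assert_eq!(extract_bearer(Some("Bearer abc123")), Ok(Some("abc123")));
    assert_eq!(extract_bearer(Some("Basic abc123")), Err("missing authorization header"));
    assert_eq!(extract_bearer(None), Ok(None));
}
```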
@@ -308,8 +299,8 @@ pub mod policies {
         auth: &AuthController,
         token: &str,
     ) -> Result<TenantTokenOutcome, AuthError> {
-        // Only search and chat actions can be accessed by a tenant token.
-        if A != actions::SEARCH && A != actions::CHAT {
+        // Only search action can be accessed by a tenant token.
+        if A != actions::SEARCH {
             return Ok(TenantTokenOutcome::NotATenantToken);
         }
@@ -37,7 +37,9 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
 use meilisearch_auth::{open_auth_store_env, AuthController};
 use meilisearch_types::milli::constants::VERSION_MAJOR;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
-use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
+use meilisearch_types::milli::update::{
+    default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
+};
 use meilisearch_types::settings::apply_settings_to_builder;
 use meilisearch_types::tasks::KindWithContent;
 use meilisearch_types::versioning::{
@@ -500,7 +502,19 @@ fn import_dump(
     let network = dump_reader.network()?.cloned().unwrap_or_default();
     index_scheduler.put_network(network)?;
 
-    let indexer_config = index_scheduler.indexer_config();
+    // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
+    let backup_config;
+    let base_config = index_scheduler.indexer_config();
+
+    let indexer_config = if base_config.max_threads.is_none() {
+        let (thread_pool, _) = default_thread_pool_and_threads();
+
+        let _config = IndexerConfig { thread_pool, ..*base_config };
+        backup_config = _config;
+        &backup_config
+    } else {
+        base_config
+    };
 
     // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
     // try to process tasks while we're trying to import the indexes.
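The `backup_config` construction above is the usual owned-or-borrowed pattern: a deferred binding is initialized only when an override is built, and a reference works for both branches. A minimal sketch with an illustrative `Config` type, not the real `IndexerConfig`:

```rust
#[derive(Clone, Debug)]
struct Config {
    max_threads: Option<usize>,
}

fn run(base: &Config) {
    // `backup` is only initialized when we need an override, yet it lives
    // long enough for `config` to borrow it on that branch.
    let backup;
    let config: &Config = if base.max_threads.is_none() {
        backup = Config { max_threads: Some(8) }; // assumed CPU count
        &backup
    } else {
        base
    };
    println!("effective config: {config:?}");
}

fn main() {
    run(&Config { max_threads: None });
    run(&Config { max_threads: Some(2) });
}
```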
@@ -746,10 +746,12 @@ impl IndexerOpts {
                 max_indexing_memory.to_string(),
             );
         }
-        export_to_env_if_not_present(
-            MEILI_MAX_INDEXING_THREADS,
-            max_indexing_threads.0.to_string(),
-        );
+        if let Some(max_indexing_threads) = max_indexing_threads.0 {
+            export_to_env_if_not_present(
+                MEILI_MAX_INDEXING_THREADS,
+                max_indexing_threads.to_string(),
+            );
+        }
     }
 }
@@ -757,15 +759,15 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
     type Error = anyhow::Error;
 
     fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
-        let thread_pool = ThreadPoolNoAbortBuilder::new()
-            .thread_name(|index| format!("indexing-thread:{index}"))
-            .num_threads(*other.max_indexing_threads)
+        let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing()
+            .num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2))
             .build()?;
 
         Ok(Self {
-            thread_pool,
             log_every_n: Some(DEFAULT_LOG_EVERY_N),
             max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
+            thread_pool: Some(thread_pool),
+            max_threads: *other.max_indexing_threads,
             max_positions_per_attributes: None,
             skip_index_budget: other.skip_index_budget,
             ..Default::default()
@@ -828,31 +830,31 @@ fn total_memory_bytes() -> Option<u64> {
     }
 }
 
-#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
-pub struct MaxThreads(usize);
+#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
+pub struct MaxThreads(Option<usize>);
 
 impl FromStr for MaxThreads {
     type Err = ParseIntError;
 
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        usize::from_str(s).map(Self)
-    }
-}
-
-impl Default for MaxThreads {
-    fn default() -> Self {
-        MaxThreads(num_cpus::get() / 2)
+    fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
+        if s.is_empty() || s == "unlimited" {
+            return Ok(MaxThreads::default());
+        }
+        usize::from_str(s).map(Some).map(MaxThreads)
     }
 }
 
 impl fmt::Display for MaxThreads {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", self.0)
+        match self.0 {
+            Some(threads) => write!(f, "{}", threads),
+            None => write!(f, "unlimited"),
+        }
     }
 }
 
 impl Deref for MaxThreads {
-    type Target = usize;
+    type Target = Option<usize>;
 
     fn deref(&self) -> &Self::Target {
         &self.0
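To make the new `MaxThreads` semantics concrete: "" and "unlimited" parse to `None`, numbers to `Some(n)`, and `Display` prints "unlimited" for the `None` case. A self-contained round trip of the same logic shown in the hunk, minus the serde derives:

```rust
use std::fmt;
use std::num::ParseIntError;
use std::str::FromStr;

#[derive(Default, Debug, Clone, Copy)]
struct MaxThreads(Option<usize>);

impl FromStr for MaxThreads {
    type Err = ParseIntError;

    fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
        if s.is_empty() || s == "unlimited" {
            return Ok(MaxThreads::default());
        }
        usize::from_str(s).map(Some).map(MaxThreads)
    }
}

impl fmt::Display for MaxThreads {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.0 {
            Some(threads) => write!(f, "{threads}"),
            None => write!(f, "unlimited"),
        }
    }
}

fn main() {
    assert_eq!("unlimited".parse::<MaxThreads>().unwrap().to_string(), "unlimited");
    assert_eq!("".parse::<MaxThreads>().unwrap().to_string(), "unlimited");
    assert_eq!("6".parse::<MaxThreads>().unwrap().to_string(), "6");
    assert!("six".parse::<MaxThreads>().is_err());
}
```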
@@ -1,560 +0,0 @@
use std::cell::RefCell;
use std::collections::HashMap;
use std::mem;
use std::sync::RwLock;
use std::time::Duration;

use actix_web::web::{self, Data};
use actix_web::{Either, HttpRequest, HttpResponse, Responder};
use actix_web_lab::sse::{self, Event, Sse};
use async_openai::config::OpenAIConfig;
use async_openai::types::{
    ChatCompletionMessageToolCall, ChatCompletionMessageToolCallChunk,
    ChatCompletionRequestAssistantMessageArgs, ChatCompletionRequestMessage,
    ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
    ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
    ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
    CreateChatCompletionRequest, FinishReason, FunctionCall, FunctionCallStream,
    FunctionObjectArgs,
};
use async_openai::Client;
use bumpalo::Bump;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::error::ResponseError;
use meilisearch_types::heed::RoTxn;
use meilisearch_types::keys::actions;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::prompt::{Prompt, PromptData};
use meilisearch_types::milli::update::new::document::DocumentFromDb;
use meilisearch_types::milli::{
    DocumentId, FieldIdMapWithMetadata, GlobalFieldsIdsMap, MetadataBuilder, TimeBudget,
};
use meilisearch_types::Index;
use serde::Deserialize;
use serde_json::json;
use tokio::runtime::Handle;
use tokio::sync::mpsc::error::SendError;

use super::settings::chat::{ChatPrompts, GlobalChatSettings};
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
use crate::routes::indexes::search::search_kind;
use crate::search::{
    add_search_rules, prepare_search, search_from_kind, HybridQuery, MatchingStrategy, SearchQuery,
    SemanticRatio,
};
use crate::search_queue::SearchQueue;

const EMBEDDER_NAME: &str = "openai";
const SEARCH_IN_INDEX_FUNCTION_NAME: &str = "_meiliSearchInIndex";

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("/completions").route(web::post().to(chat)));
}

/// Get a chat completion
async fn chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    req: HttpRequest,
    search_queue: web::Data<SearchQueue>,
    web::Json(chat_completion): web::Json<CreateChatCompletionRequest>,
) -> impl Responder {
    // To enable later on, when the feature will be experimental
    // index_scheduler.features().check_chat("Using the /chat route")?;

    assert_eq!(
        chat_completion.n.unwrap_or(1),
        1,
        "Meilisearch /chat only support one completion at a time (n = 1, n = null)"
    );

    if chat_completion.stream.unwrap_or(false) {
        Either::Right(
            streamed_chat(index_scheduler, auth_ctrl, req, search_queue, chat_completion).await,
        )
    } else {
        Either::Left(
            non_streamed_chat(index_scheduler, auth_ctrl, req, search_queue, chat_completion).await,
        )
    }
}

/// Setup search tool in chat completion request
fn setup_search_tool(
    index_scheduler: &Data<IndexScheduler>,
    filters: &meilisearch_auth::AuthFilter,
    chat_completion: &mut CreateChatCompletionRequest,
    prompts: &ChatPrompts,
) -> Result<(), ResponseError> {
    let tools = chat_completion.tools.get_or_insert_default();
    if tools.iter().find(|t| t.function.name == SEARCH_IN_INDEX_FUNCTION_NAME).is_some() {
        panic!("{SEARCH_IN_INDEX_FUNCTION_NAME} function already set");
    }

    let index_uids: Vec<_> = index_scheduler
        .index_names()?
        .into_iter()
        .filter(|index_uid| filters.is_index_authorized(&index_uid))
        .collect();

    let tool = ChatCompletionToolArgs::default()
        .r#type(ChatCompletionToolType::Function)
        .function(
            FunctionObjectArgs::default()
                .name(SEARCH_IN_INDEX_FUNCTION_NAME)
                .description(&prompts.search_description)
                .parameters(json!({
                    "type": "object",
                    "properties": {
                        "index_uid": {
                            "type": "string",
                            "enum": index_uids,
                            "description": prompts.search_index_uid_param,
                        },
                        "q": {
                            // Unfortunately, Mistral does not support an array of types, here.
                            // "type": ["string", "null"],
                            "type": "string",
                            "description": prompts.search_q_param,
                        }
                    },
                    "required": ["index_uid", "q"],
                    "additionalProperties": false,
                }))
                .strict(true)
                .build()
                .unwrap(),
        )
        .build()
        .unwrap();
    tools.push(tool);
    chat_completion.messages.insert(
        0,
        ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage {
            content: ChatCompletionRequestSystemMessageContent::Text(prompts.system.clone()),
            name: None,
        }),
    );

    Ok(())
}

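// A sketch (not part of the diff) of the tool definition setup_search_tool
// builds, following the json! parameters above; the description strings come
// from the configured ChatPrompts:
//
// {
//   "type": "function",
//   "function": {
//     "name": "_meiliSearchInIndex",
//     "description": "<prompts.search_description>",
//     "parameters": {
//       "type": "object",
//       "properties": {
//         "index_uid": { "type": "string", "enum": ["<authorized index uids>"], "description": "<prompts.search_index_uid_param>" },
//         "q": { "type": "string", "description": "<prompts.search_q_param>" }
//       },
//       "required": ["index_uid", "q"],
//       "additionalProperties": false
//     },
//     "strict": true
//   }
// }
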
/// Process search request and return formatted results
async fn process_search_request(
    index_scheduler: &GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    search_queue: &web::Data<SearchQueue>,
    auth_token: &str,
    index_uid: String,
    q: Option<String>,
) -> Result<(Index, String), ResponseError> {
    let mut query = SearchQuery {
        q,
        hybrid: Some(HybridQuery {
            semantic_ratio: SemanticRatio::default(),
            embedder: EMBEDDER_NAME.to_string(),
        }),
        limit: 20,
        matching_strategy: MatchingStrategy::Frequency,
        ..Default::default()
    };

    let auth_filter = ActionPolicy::<{ actions::SEARCH }>::authenticate(
        auth_ctrl,
        auth_token,
        Some(index_uid.as_str()),
    )?;

    // Tenant token search_rules.
    if let Some(search_rules) = auth_filter.get_index_search_rules(&index_uid) {
        add_search_rules(&mut query.filter, search_rules);
    }

    // TBD
    // let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);

    let index = index_scheduler.index(&index_uid)?;
    let search_kind =
        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;

    let permit = search_queue.try_get_search_permit().await?;
    let features = index_scheduler.features();
    let index_cloned = index.clone();
    let search_result = tokio::task::spawn_blocking(move || -> Result<_, ResponseError> {
        let rtxn = index_cloned.read_txn()?;
        let time_budget = match index_cloned
            .search_cutoff(&rtxn)
            .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.clone())))?
        {
            Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),
            None => TimeBudget::default(),
        };

        let (search, _is_finite_pagination, _max_total_hits, _offset) =
            prepare_search(&index_cloned, &rtxn, &query, &search_kind, time_budget, features)?;

        search_from_kind(index_uid, search_kind, search)
            .map(|(search_results, _)| search_results)
            .map_err(ResponseError::from)
    })
    .await;
    permit.drop().await;

    let search_result = search_result?;
    if let Ok(ref search_result) = search_result {
        // aggregate.succeed(search_result);
        if search_result.degraded {
            MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
        }
    }
    // analytics.publish(aggregate, &req);

    let search_result = search_result?;
    let rtxn = index.read_txn()?;
    let render_alloc = Bump::new();
    let formatted = format_documents(&rtxn, &index, &render_alloc, search_result.documents_ids)?;
    let text = formatted.join("\n");
    drop(rtxn);

    Ok((index, text))
}

async fn non_streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    req: HttpRequest,
    search_queue: web::Data<SearchQueue>,
    mut chat_completion: CreateChatCompletionRequest,
) -> Result<HttpResponse, ResponseError> {
    let filters = index_scheduler.filters();

    let chat_settings = match index_scheduler.chat_settings().unwrap() {
        Some(value) => serde_json::from_value(value).unwrap(),
        None => GlobalChatSettings::default(),
    };

    let mut config = OpenAIConfig::default();
    if let Some(api_key) = chat_settings.api_key.as_ref() {
        config = config.with_api_key(api_key);
    }
    if let Some(base_api) = chat_settings.base_api.as_ref() {
        config = config.with_api_base(base_api);
    }
    let client = Client::with_config(config);

    let auth_token = extract_token_from_request(&req)?.unwrap();
    setup_search_tool(&index_scheduler, filters, &mut chat_completion, &chat_settings.prompts)?;

    let mut response;
    loop {
        response = client.chat().create(chat_completion.clone()).await.unwrap();

        let choice = &mut response.choices[0];
        match choice.finish_reason {
            Some(FinishReason::ToolCalls) => {
                let tool_calls = mem::take(&mut choice.message.tool_calls).unwrap_or_default();

                let (meili_calls, other_calls): (Vec<_>, Vec<_>) = tool_calls
                    .into_iter()
                    .partition(|call| call.function.name == SEARCH_IN_INDEX_FUNCTION_NAME);

                chat_completion.messages.push(
                    ChatCompletionRequestAssistantMessageArgs::default()
                        .tool_calls(meili_calls.clone())
                        .build()
                        .unwrap()
                        .into(),
                );

                for call in meili_calls {
                    let result = match serde_json::from_str(&call.function.arguments) {
                        Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
                            &index_scheduler,
                            auth_ctrl.clone(),
                            &search_queue,
                            &auth_token,
                            index_uid,
                            q,
                        )
                        .await
                        .map_err(|e| e.to_string()),
                        Err(err) => Err(err.to_string()),
                    };

                    let text = match result {
                        Ok((_, text)) => text,
                        Err(err) => err,
                    };

                    chat_completion.messages.push(ChatCompletionRequestMessage::Tool(
                        ChatCompletionRequestToolMessage {
                            tool_call_id: call.id.clone(),
                            content: ChatCompletionRequestToolMessageContent::Text(format!(
                                "{}\n\n{text}",
                                chat_settings.prompts.pre_query
                            )),
                        },
                    ));
                }

                // Let the client call other tools by themselves
                if !other_calls.is_empty() {
                    response.choices[0].message.tool_calls = Some(other_calls);
                    break;
                }
            }
            _ => break,
        }
    }

    Ok(HttpResponse::Ok().json(response))
}

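// Summary of the loop above (not part of the diff): this is the classic
// tool-calling round trip. Call the model; while it finishes with ToolCalls,
// run the matching Meilisearch searches, append each result as a Tool
// message, and call the model again. Any non-Meilisearch tool calls are
// handed back to the client unanswered.
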
async fn streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    req: HttpRequest,
    search_queue: web::Data<SearchQueue>,
    mut chat_completion: CreateChatCompletionRequest,
) -> Result<impl Responder, ResponseError> {
    let filters = index_scheduler.filters();

    let chat_settings = match index_scheduler.chat_settings().unwrap() {
        Some(value) => serde_json::from_value(value).unwrap(),
        None => GlobalChatSettings::default(),
    };

    let mut config = OpenAIConfig::default();
    if let Some(api_key) = chat_settings.api_key.as_ref() {
        config = config.with_api_key(api_key);
    }
    if let Some(base_api) = chat_settings.base_api.as_ref() {
        config = config.with_api_base(base_api);
    }

    let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
    setup_search_tool(&index_scheduler, filters, &mut chat_completion, &chat_settings.prompts)?;

    let (tx, rx) = tokio::sync::mpsc::channel(10);
    let _join_handle = Handle::current().spawn(async move {
        let client = Client::with_config(config.clone());
        let mut global_tool_calls = HashMap::<u32, Call>::new();
        let mut finish_reason = None;

        // Limit the number of internal calls to satisfy the search requests of the LLM
        'main: for _ in 0..20 {
            let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
            while let Some(result) = response.next().await {
                match result {
                    Ok(resp) => {
                        let choice = &resp.choices[0];
                        finish_reason = choice.finish_reason;

                        #[allow(deprecated)]
                        let ChatCompletionStreamResponseDelta {
                            content,
                            // Using deprecated field but keeping for compatibility
                            function_call: _,
                            ref tool_calls,
                            role: _,
                            refusal: _,
                        } = &choice.delta;

                        if content.is_some() {
                            if let Err(SendError(_)) = tx.send(Event::Data(sse::Data::new_json(&resp).unwrap())).await {
                                return;
                            }
                        }

                        match tool_calls {
                            Some(tool_calls) => {
                                for chunk in tool_calls {
                                    let ChatCompletionMessageToolCallChunk {
                                        index,
                                        id,
                                        r#type: _,
                                        function,
                                    } = chunk;
                                    let FunctionCallStream { name, arguments } =
                                        function.as_ref().unwrap();
                                    global_tool_calls
                                        .entry(*index)
                                        .and_modify(|call| call.append(arguments.as_ref().unwrap()))
                                        .or_insert_with(|| Call {
                                            id: id.as_ref().unwrap().clone(),
                                            function_name: name.as_ref().unwrap().clone(),
                                            arguments: arguments.as_ref().unwrap().clone(),
                                        });
                                }
                            }
                            None if !global_tool_calls.is_empty() => {
                                let (meili_calls, _other_calls): (Vec<_>, Vec<_>) =
                                    mem::take(&mut global_tool_calls)
                                        .into_values()
                                        .map(|call| ChatCompletionMessageToolCall {
                                            id: call.id,
                                            r#type: Some(ChatCompletionToolType::Function),
                                            function: FunctionCall {
                                                name: call.function_name,
                                                arguments: call.arguments,
                                            },
                                        })
                                        .partition(|call| call.function.name == SEARCH_IN_INDEX_FUNCTION_NAME);

                                chat_completion.messages.push(
                                    ChatCompletionRequestAssistantMessageArgs::default()
                                        .tool_calls(meili_calls.clone())
                                        .build()
                                        .unwrap()
                                        .into(),
                                );

                                for call in meili_calls {
                                    if let Err(SendError(_)) = tx.send(Event::Data(
                                        sse::Data::new_json(json!({
                                            "object": "chat.completion.tool.call",
                                            "tool": call,
                                        }))
                                        .unwrap(),
                                    ))
                                    .await {
                                        return;
                                    }

                                    let result = match serde_json::from_str(&call.function.arguments) {
                                        Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
                                            &index_scheduler,
                                            auth_ctrl.clone(),
                                            &search_queue,
                                            &auth_token,
                                            index_uid,
                                            q,
                                        ).await.map_err(|e| e.to_string()),
                                        Err(err) => Err(err.to_string()),
                                    };

                                    let is_error = result.is_err();
                                    let text = match result {
                                        Ok((_, text)) => text,
                                        Err(err) => err,
                                    };

                                    let tool = ChatCompletionRequestToolMessage {
                                        tool_call_id: call.id.clone(),
                                        content: ChatCompletionRequestToolMessageContent::Text(
                                            format!("{}\n\n{text}", chat_settings.prompts.pre_query),
                                        ),
                                    };

                                    if let Err(SendError(_)) = tx.send(Event::Data(
                                        sse::Data::new_json(json!({
                                            "object": if is_error {
                                                "chat.completion.tool.error"
                                            } else {
                                                "chat.completion.tool.output"
                                            },
                                            "tool": ChatCompletionRequestToolMessage {
                                                tool_call_id: call.id,
                                                content: ChatCompletionRequestToolMessageContent::Text(
                                                    text,
                                                ),
                                            },
                                        }))
                                        .unwrap(),
                                    ))
                                    .await {
                                        return;
                                    }

                                    chat_completion.messages.push(ChatCompletionRequestMessage::Tool(tool));
                                }
                            }
                            None => (),
                        }
                    }
                    Err(err) => {
                        tracing::error!("{err:?}");
                        if let Err(SendError(_)) = tx.send(Event::Data(sse::Data::new_json(&json!({
                            "object": "chat.completion.error",
                            "tool": err.to_string(),
                        })).unwrap())).await {
                            return;
                        }

                        break 'main;
                    }
                }
            }

            // We must stop if the finish reason is not something we can solve with Meilisearch
            if finish_reason.map_or(true, |fr| fr != FinishReason::ToolCalls) {
                break;
            }
        }

        let _ = tx.send(Event::Data(sse::Data::new("[DONE]")));
    });

    Ok(Sse::from_infallible_receiver(rx).with_retry_duration(Duration::from_secs(10)))
}

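// Events emitted on the SSE stream above (summary, not part of the diff):
// raw OpenAI chunks whenever the delta carries content, plus custom objects
// "chat.completion.tool.call" before each search, "chat.completion.tool.output"
// or "chat.completion.tool.error" with the result, "chat.completion.error" on
// stream failure, and a final "[DONE]" data event.
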
/// The structure used to aggregate the function calls to make.
#[derive(Debug)]
struct Call {
    id: String,
    function_name: String,
    arguments: String,
}

impl Call {
    fn append(&mut self, arguments: &str) {
        self.arguments.push_str(arguments);
    }
}

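// A small sketch (not part of the diff) of the aggregation above: streamed
// tool-call argument fragments are concatenated with Call::append until the
// JSON is complete enough to deserialize.
fn call_aggregation_sketch() {
    let mut call = Call {
        id: "call_1".to_string(),
        function_name: "_meiliSearchInIndex".to_string(),
        arguments: "{\"index_uid\":\"mov".to_string(),
    };
    call.append("ies\",\"q\":\"space opera\"}");
    assert_eq!(call.arguments, "{\"index_uid\":\"movies\",\"q\":\"space opera\"}");
}
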
#[derive(Deserialize)]
struct SearchInIndexParameters {
    /// The index uid to search in.
    index_uid: String,
    /// The query parameter to use.
    q: Option<String>,
}

fn format_documents<'t, 'doc>(
    rtxn: &RoTxn<'t>,
    index: &Index,
    doc_alloc: &'doc Bump,
    internal_docids: Vec<DocumentId>,
) -> Result<Vec<&'doc str>, ResponseError> {
    let ChatConfig { prompt: PromptData { template, max_bytes }, .. } = index.chat_config(rtxn)?;

    let prompt = Prompt::new(template, max_bytes).unwrap();
    let fid_map = index.fields_ids_map(rtxn)?;
    let metadata_builder = MetadataBuilder::from_index(index, rtxn)?;
    let fid_map_with_meta = FieldIdMapWithMetadata::new(fid_map.clone(), metadata_builder);
    let global = RwLock::new(fid_map_with_meta);
    let gfid_map = RefCell::new(GlobalFieldsIdsMap::new(&global));

    let external_ids: Vec<String> = index
        .external_id_of(rtxn, internal_docids.iter().copied())?
        .into_iter()
        .collect::<Result<_, _>>()?;

    let mut renders = Vec::new();
    for (docid, external_docid) in internal_docids.into_iter().zip(external_ids) {
        let document = match DocumentFromDb::new(docid, rtxn, index, &fid_map)? {
            Some(doc) => doc,
            None => continue,
        };

        let text = prompt.render_document(&external_docid, document, &gfid_map, doc_alloc).unwrap();
        renders.push(text);
    }

    Ok(renders)
}
@@ -6,7 +6,7 @@ use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{
    settings, ChatSettings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
    settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
};
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
@@ -508,17 +508,6 @@ make_setting_routes!(
        camelcase_attr: "prefixSearch",
        analytics: PrefixSearchAnalytics
    },
    {
        route: "/chat",
        update_verb: put,
        value_type: ChatSettings,
        err_type: meilisearch_types::deserr::DeserrJsonError<
            meilisearch_types::error::deserr_codes::InvalidSettingsIndexChat,
        >,
        attr: chat,
        camelcase_attr: "chat",
        analytics: ChatAnalytics
    },
);

#[utoipa::path(
@@ -608,7 +597,6 @@ pub async fn update_all(
            ),
            facet_search: FacetSearchAnalytics::new(new_settings.facet_search.as_ref().set()),
            prefix_search: PrefixSearchAnalytics::new(new_settings.prefix_search.as_ref().set()),
            chat: ChatAnalytics::new(new_settings.chat.as_ref().set()),
        },
        &req,
    );

@@ -10,8 +10,8 @@ use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::FilterableAttributesRule;
use meilisearch_types::settings::{
    ChatSettings, FacetingSettings, PaginationSettings, PrefixSearchSettings,
    ProximityPrecisionView, RankingRuleView, SettingEmbeddingSettings, TypoSettings,
    FacetingSettings, PaginationSettings, PrefixSearchSettings, ProximityPrecisionView,
    RankingRuleView, SettingEmbeddingSettings, TypoSettings,
};
use serde::Serialize;

@@ -39,7 +39,6 @@ pub struct SettingsAnalytics {
    pub non_separator_tokens: NonSeparatorTokensAnalytics,
    pub facet_search: FacetSearchAnalytics,
    pub prefix_search: PrefixSearchAnalytics,
    pub chat: ChatAnalytics,
}

impl Aggregate for SettingsAnalytics {
@@ -199,7 +198,6 @@ impl Aggregate for SettingsAnalytics {
                set: new.prefix_search.set | self.prefix_search.set,
                value: new.prefix_search.value.or(self.prefix_search.value),
            },
            chat: ChatAnalytics { set: new.chat.set | self.chat.set },
        })
    }

@@ -676,18 +674,3 @@ impl PrefixSearchAnalytics {
        SettingsAnalytics { prefix_search: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct ChatAnalytics {
    pub set: bool,
}

impl ChatAnalytics {
    pub fn new(settings: Option<&ChatSettings>) -> Self {
        Self { set: settings.is_some() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { chat: self, ..Default::default() }
    }
}

@@ -52,7 +52,6 @@ const PAGINATION_DEFAULT_LIMIT_FN: fn() -> usize = || 20;

mod api_key;
pub mod batches;
pub mod chat;
mod dump;
pub mod features;
pub mod indexes;
@@ -62,7 +61,6 @@ mod multi_search;
mod multi_search_analytics;
pub mod network;
mod open_api_utils;
pub mod settings;
mod snapshot;
mod swap_indexes;
pub mod tasks;
@@ -115,9 +113,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
        .service(web::scope("/swap-indexes").configure(swap_indexes::configure))
        .service(web::scope("/metrics").configure(metrics::configure))
        .service(web::scope("/experimental-features").configure(features::configure))
        .service(web::scope("/network").configure(network::configure))
        .service(web::scope("/chat").configure(chat::configure))
        .service(web::scope("/settings/chat").configure(settings::chat::configure));
        .service(web::scope("/network").configure(network::configure));

    #[cfg(feature = "swagger")]
    {

@@ -1,107 +0,0 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use serde::{Deserialize, Serialize};

use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("")
            .route(web::get().to(get_settings))
            .route(web::patch().to(SeqHandler(patch_settings))),
    );
}

async fn get_settings(
    index_scheduler: GuardedData<
        ActionPolicy<{ actions::CHAT_SETTINGS_GET }>,
        Data<IndexScheduler>,
    >,
) -> Result<HttpResponse, ResponseError> {
    let settings = match index_scheduler.chat_settings()? {
        Some(value) => serde_json::from_value(value).unwrap(),
        None => GlobalChatSettings::default(),
    };
    Ok(HttpResponse::Ok().json(settings))
}

async fn patch_settings(
    index_scheduler: GuardedData<
        ActionPolicy<{ actions::CHAT_SETTINGS_UPDATE }>,
        Data<IndexScheduler>,
    >,
    web::Json(chat_settings): web::Json<GlobalChatSettings>,
) -> Result<HttpResponse, ResponseError> {
    let chat_settings = serde_json::to_value(chat_settings).unwrap();
    index_scheduler.put_chat_settings(&chat_settings)?;
    Ok(HttpResponse::Ok().finish())
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct GlobalChatSettings {
    pub source: String,
    pub base_api: Option<String>,
    pub api_key: Option<String>,
    pub prompts: ChatPrompts,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct ChatPrompts {
    pub system: String,
    pub search_description: String,
    pub search_q_param: String,
    pub search_index_uid_param: String,
    pub pre_query: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct ChatIndexSettings {
    pub description: String,
    pub document_template: String,
}

const DEFAULT_SYSTEM_MESSAGE: &str = "You are a highly capable research assistant with access to powerful search tools. IMPORTANT INSTRUCTIONS:\
    1. When answering questions, you MUST make multiple tool calls (at least 2-3) to gather comprehensive information.\
    2. Use different search queries for each tool call - vary keywords, rephrase questions, and explore different semantic angles to ensure broad coverage.\
    3. Always explicitly announce BEFORE making each tool call by saying: \"I'll search for [specific information] now.\"\
    4. Combine information from ALL tool calls to provide complete, nuanced answers rather than relying on a single source.\
    5. For complex topics, break down your research into multiple targeted queries rather than using a single generic search.";

/// The default description of the searchInIndex tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_TOOL_DESCRIPTION: &str =
    "Search the database for relevant JSON documents using an optional query.";
/// The default description of the searchInIndex `q` parameter tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_Q_PARAMETER_TOOL_DESCRIPTION: &str =
    "The search query string used to find relevant documents in the index. \
    This should contain keywords or phrases that best represent what the user is looking for. \
    More specific queries will yield more precise results.";
/// The default description of the searchInIndex `index` parameter tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_INDEX_PARAMETER_TOOL_DESCRIPTION: &str =
    "The name of the index to search within. An index is a collection of documents organized for search. \
    Selecting the right index ensures the most relevant results for the user query";

impl Default for GlobalChatSettings {
    fn default() -> Self {
        GlobalChatSettings {
            source: "openAi".to_string(),
            base_api: None,
            api_key: None,
            prompts: ChatPrompts {
                system: DEFAULT_SYSTEM_MESSAGE.to_string(),
                search_description: DEFAULT_SEARCH_IN_INDEX_TOOL_DESCRIPTION.to_string(),
                search_q_param: DEFAULT_SEARCH_IN_INDEX_Q_PARAMETER_TOOL_DESCRIPTION.to_string(),
                search_index_uid_param: DEFAULT_SEARCH_IN_INDEX_INDEX_PARAMETER_TOOL_DESCRIPTION
                    .to_string(),
                pre_query: "".to_string(),
            },
        }
    }
}
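// A sketch (not part of the diff) of a JSON body these settings would accept,
// given the camelCase rename and deny_unknown_fields attributes above; the
// baseApi and apiKey values are assumed example placeholders:
//
// {
//   "source": "openAi",
//   "baseApi": "https://api.openai.com/v1",
//   "apiKey": "sk-...",
//   "prompts": {
//     "system": "...",
//     "searchDescription": "...",
//     "searchQParam": "...",
//     "searchIndexUidParam": "...",
//     "preQuery": ""
//   }
// }
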
@@ -1 +0,0 @@
pub mod chat;
@@ -882,7 +882,7 @@ pub fn add_search_rules(filter: &mut Option<Value>, rules: IndexSearchRules) {
    }
}

pub fn prepare_search<'t>(
fn prepare_search<'t>(
    index: &'t Index,
    rtxn: &'t RoTxn,
    query: &'t SearchQuery,

@@ -1,5 +1,5 @@
//! This file implements a queue of searches to process and the ability to control how many searches can be run in parallel.
//! We need this because we don't want to process more search requests than we have cores.
//! We need this because we don't want to process more search requests than the available CPU cores.
//! That slows down everything and consumes RAM for no reason.
//! The steps to do a search are to get the `SearchQueue` data structure and try to get a search permit.
//! This can fail if the queue is full, and we need to drop your search request to register a new one.
@@ -8,7 +8,7 @@
//!
//! In order to do a search request you should try to get a search permit.
//! Retrieve the `SearchQueue` structure from actix-web (`search_queue: Data<SearchQueue>`)
//! and right before processing the search, calls the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//! and right before processing the search, call the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//!
//! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
//! Then, the queue/scheduler is going to either:
@@ -121,12 +121,12 @@ impl SearchQueue {
        let mut queue: Vec<oneshot::Sender<Permit>> = Default::default();
        let mut rng: StdRng = StdRng::from_entropy();
        let mut searches_running: usize = 0;
        // By having a capacity of parallelism we ensures that every time a search finish it can release its RAM asap
        // By having a capacity of parallelism we ensure that every time a search finish it can release its RAM asap
        let (sender, mut search_finished) = mpsc::channel(parallelism.into());

        loop {
            tokio::select! {
                // biased select because we wants to free up space before trying to register new tasks
                // biased select because we want to free up space before trying to register new tasks
                biased;
                _ = search_finished.recv() => {
                    searches_running = searches_running.saturating_sub(1);
@@ -148,11 +148,11 @@ impl SearchQueue {

                    if searches_running < usize::from(parallelism) && queue.is_empty() {
                        searches_running += 1;
                        // if the search requests die it's not a hard error on our side
                        // if the search requests die, it's not a hard error on our side
                        let _ = search_request.send(Permit { sender: sender.clone() });
                        continue;
                    } else if capacity == 0 {
                        // in the very specific case where we have a capacity of zero
                        // in the very specific case where we have a capacity of zero,
                        // we must refuse the request straight away without going through
                        // the queue stuff.
                        drop(search_request);
@@ -183,7 +183,7 @@ impl SearchQueue {
            .map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?;

        // If we've been for more than one minute to get a search permit, it's better to simply
        // abort the search request than spending time processing something were the client
        // abort the search request than spending time processing something where the client
        // most certainly exited or got a timeout a long time ago.
        // We may find a better solution in https://github.com/actix/actix-web/issues/3462.
        if now.elapsed() > self.time_to_abort {

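// A minimal sketch (not part of the diff) of the permit flow the doc-comments
// above describe; it assumes the queue error converts into ResponseError, as
// the handlers shown earlier rely on.
async fn permit_flow_sketch(
    search_queue: actix_web::web::Data<SearchQueue>,
) -> Result<(), ResponseError> {
    // May fail with TooManySearchRequests when the queue is full.
    let permit = search_queue.try_get_search_permit().await?;
    // ... run the search on a blocking thread here ...
    permit.drop().await; // free the slot so a queued search can start
    Ok(())
}
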
@@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() {
    let (response, code) = server.add_api_key(content).await;
    meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
    {
      "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.",
      "message": "`uid` field value `[uuid]` is already an existing API key.",
      "code": "api_key_already_exists",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#api_key_already_exists"
@@ -820,22 +820,6 @@ async fn list_api_keys() {
      "createdAt": "[ignored]",
      "updatedAt": "[ignored]"
    },
    {
      "name": "Default Chat API Key",
      "description": "Use it to chat and search from the frontend",
      "key": "[ignored]",
      "uid": "[ignored]",
      "actions": [
        "search",
        "chat.get"
      ],
      "indexes": [
        "*"
      ],
      "expiresAt": null,
      "createdAt": "[ignored]",
      "updatedAt": "[ignored]"
    },
    {
      "name": "Default Search API Key",
      "description": "Use it to search from the frontend",

@@ -29,6 +29,10 @@ impl<'a> Index<'a, Owned> {
        }
    }

    pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> {
        Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData }
    }

    pub async fn load_test_set(&self) -> u64 {
        let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
        let (response, code) = self
@@ -290,6 +294,20 @@ impl Index<'_, Shared> {
        }
        (task, code)
    }

    pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
        let (mut task, code) = self._update(primary_key).await;
        if code.is_success() {
            task = self.wait_task(task.uid()).await;
            if task.is_success() {
                panic!(
                    "`update_index_fail` succeeded: {}",
                    serde_json::to_string_pretty(&task).unwrap()
                );
            }
        }
        (task, code)
    }
}

#[allow(dead_code)]
@@ -333,6 +351,14 @@ impl<State> Index<'_, State> {
        self.service.post_encoded("/indexes", body, self.encoder).await
    }

    pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
        let body = json!({
            "primaryKey": primary_key,
        });
        let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
        self.service.patch_encoded(url, body, self.encoder).await
    }

    pub(super) async fn _delete(&self) -> (Value, StatusCode) {
        let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
        self.service.delete(url).await

@@ -128,7 +128,8 @@ impl Display for Value {
                ".finishedAt" => "[date]",
                ".duration" => "[duration]",
                ".processingTimeMs" => "[duration]",
                ".details.embedders.*.url" => "[url]"
                ".details.embedders.*.url" => "[url]",
                ".details.dumpUid" => "[dump_uid]",
            })
        )
    }
@@ -264,6 +265,24 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    ])
});

pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> {
    static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
    INDEX.get_or_init(|| async {
        let server = Server::new_shared();
        let index = server._index("SCORE_DOCUMENTS").to_shared();
        let documents = SCORE_DOCUMENTS.clone();
        let (response, _code) = index._add_documents(documents, None).await;
        index.wait_task(response.uid()).await.succeeded();
        let (response, _code) = index
            ._update_settings(
                json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
            )
            .await;
        index.wait_task(response.uid()).await.succeeded();
        index
    }).await
}

pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
@@ -333,7 +352,7 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Sha
            index.wait_task(response.uid()).await.succeeded();
            let (response, _code) = index
                ._update_settings(
                    json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}),
                    json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}),
                )
                .await;
            index.wait_task(response.uid()).await.succeeded();
@@ -435,3 +454,57 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
        })
        .await
}

pub static GEO_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "id": 1,
            "name": "Taco Truck",
            "address": "444 Salsa Street, Burritoville",
            "type": "Mexican",
            "rating": 9,
            "_geo": {
                "lat": 34.0522,
                "lng": -118.2437
            }
        },
        {
            "id": 2,
            "name": "La Bella Italia",
            "address": "456 Elm Street, Townsville",
            "type": "Italian",
            "rating": 9,
            "_geo": {
                "lat": "45.4777599",
                "lng": "9.1967508"
            }
        },
        {
            "id": 3,
            "name": "Crêpe Truck",
            "address": "2 Billig Avenue, Rouenville",
            "type": "French",
            "rating": 10
        }
    ])
});

pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared> {
    static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
    INDEX
        .get_or_init(|| async {
            let server = Server::new_shared();
            let index = server._index("SHARED_GEO_DOCUMENTS").to_shared();
            let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await;
            index.wait_task(response.uid()).await.succeeded();

            let (response, _code) = index
                ._update_settings(
                    json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}),
                )
                .await;
            index.wait_task(response.uid()).await.succeeded();
            index
        })
        .await
}

@@ -399,18 +399,9 @@ impl<State> Server<State> {
    pub async fn wait_task(&self, update_id: u64) -> Value {
        // try several times to get status, or panic to not wait forever
        let url = format!("/tasks/{}", update_id);
        // Increase timeout for vector-related tests
        let max_attempts = if url.contains("/tasks/") {
            if update_id > 1000 {
                400 // 200 seconds for vector tests
            } else {
                100 // 50 seconds for other tests
            }
        } else {
            100 // 50 seconds for other tests
        };
        let max_attempts = 400; // 200 seconds total, 0.5s per attempt

        for _ in 0..max_attempts {
        for i in 0..max_attempts {
            let (response, status_code) = self.service.get(&url).await;
            assert_eq!(200, status_code, "response: {}", response);

@@ -420,6 +411,10 @@ impl<State> Server<State> {

            // wait 0.5 second.
            sleep(Duration::from_millis(500)).await;

            if i == max_attempts - 1 {
                dbg!(response);
            }
        }
        panic!("Timeout waiting for update id");
    }

File diff suppressed because it is too large
@@ -1,39 +1,35 @@
use meili_snap::{json_string, snapshot};

use crate::common::{GetAllDocumentsOptions, Server};
use crate::common::{shared_does_not_exists_index, GetAllDocumentsOptions, Server};
use crate::json;

#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
    let server = Server::new().await;
    let index = server.index("test");
    let (task, code) = index.delete_document(0).await;
    let index = shared_does_not_exists_index().await;
    let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
    assert_eq!(code, 202);

    let response = index.wait_task(task.uid()).await;

    assert_eq!(response["status"], "failed");
    index.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
async fn delete_one_unexisting_document() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    index.create(None).await;
    let (response, code) = index.delete_document(0).await;
    assert_eq!(code, 202, "{}", response);
    let update = index.wait_task(response.uid()).await;
    assert_eq!(update["status"], "succeeded");
    assert_eq!(code, 202, "{response}");
    index.wait_task(response.uid()).await.succeeded();
}

#[actix_rt::test]
async fn delete_one_document() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _status_code) =
        index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
    index.wait_task(task.uid()).await.succeeded();
    let (task, status_code) = server.index("test").delete_document(0).await;
    let (task, status_code) = index.delete_document(0).await;
    assert_eq!(status_code, 202);
    index.wait_task(task.uid()).await.succeeded();

@@ -43,20 +39,18 @@ async fn delete_one_document() {

#[actix_rt::test]
async fn clear_all_documents_unexisting_index() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, code) = index.clear_all_documents().await;
    assert_eq!(code, 202);

    let response = index.wait_task(task.uid()).await;

    assert_eq!(response["status"], "failed");
    index.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
async fn clear_all_documents() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _status_code) = index
        .add_documents(
            json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]),
@@ -67,7 +61,7 @@ async fn clear_all_documents() {
    let (task, code) = index.clear_all_documents().await;
    assert_eq!(code, 202);

    let _update = index.wait_task(task.uid()).await;
    let _update = index.wait_task(task.uid()).await.succeeded();
    let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
    assert_eq!(code, 200);
    assert!(response["results"].as_array().unwrap().is_empty());
@@ -75,14 +69,14 @@ async fn clear_all_documents() {

#[actix_rt::test]
async fn clear_all_documents_empty_index() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, _status_code) = index.create(None).await;
    index.wait_task(task.uid()).await.succeeded();
    let (task, code) = index.clear_all_documents().await;
    assert_eq!(code, 202);

    let _update = index.wait_task(task.uid()).await;
    let _update = index.wait_task(task.uid()).await.succeeded();
    let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
    assert_eq!(code, 200);
    assert!(response["results"].as_array().unwrap().is_empty());
@@ -90,33 +84,31 @@ async fn clear_all_documents_empty_index() {

#[actix_rt::test]
async fn error_delete_batch_unexisting_index() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task, code) = index.delete_batch(vec![]).await;
    let expected_response = json!({
        "message": "Index `test` not found.",
        "message": format!("Index `{}` not found.", index.uid),
        "code": "index_not_found",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#index_not_found"
    });
    assert_eq!(code, 202);

    let response = index.wait_task(task.uid()).await;

    assert_eq!(response["status"], "failed");
    let response = index.wait_task(task.uid()).await.failed();
    assert_eq!(response["error"], expected_response);
}

#[actix_rt::test]
async fn delete_batch() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
    index.wait_task(task.uid()).await.succeeded();
    let (task, code) = index.delete_batch(vec![1, 0]).await;
    assert_eq!(code, 202);

    let _update = index.wait_task(task.uid()).await;
    let _update = index.wait_task(task.uid()).await.succeeded();
    let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 1);
@@ -125,14 +117,14 @@ async fn delete_batch() {

#[actix_rt::test]
async fn delete_no_document_batch() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();
    let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
    index.wait_task(task.uid()).await.succeeded();
    let (_response, code) = index.delete_batch(vec![]).await;
    assert_eq!(code, 202, "{}", _response);
    let (response, code) = index.delete_batch(vec![]).await;
    assert_eq!(code, 202, "{response}");

    let _update = index.wait_task(_response.uid()).await;
    let _update = index.wait_task(response.uid()).await.succeeded();
    let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
    assert_eq!(code, 200);
    assert_eq!(response["results"].as_array().unwrap().len(), 3);
@@ -140,8 +132,8 @@ async fn delete_no_document_batch() {

#[actix_rt::test]
async fn delete_document_by_filter() {
    let server = Server::new().await;
    let index = server.index("doggo");
    let server = Server::new_shared();
    let index = server.unique_index();
    index.update_settings_filterable_attributes(json!(["color"])).await;
    let (task, _status_code) = index
        .add_documents(
@@ -178,22 +170,22 @@ async fn delete_document_by_filter() {
    let (response, code) =
        index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
    snapshot!(code, @"202 Accepted");
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 2,
      "indexUid": "doggo",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "documentDeletion",
      "enqueuedAt": "[date]"
    }
    "###);

    let response = index.wait_task(response.uid()).await;
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    let response = index.wait_task(response.uid()).await.succeeded();
    snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "uid": 2,
      "batchUid": 2,
      "indexUid": "doggo",
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
      "indexUid": "[uuid]",
      "status": "succeeded",
      "type": "documentDeletion",
      "canceledBy": null,
@@ -251,22 +243,22 @@ async fn delete_document_by_filter() {
    let (response, code) =
        index.delete_document_by_filter(json!({ "filter": "color NOT EXISTS"})).await;
    snapshot!(code, @"202 Accepted");
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "taskUid": 3,
      "indexUid": "doggo",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "documentDeletion",
      "enqueuedAt": "[date]"
    }
    "###);

    let response = index.wait_task(response.uid()).await;
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    let response = index.wait_task(response.uid()).await.succeeded();
    snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "uid": 3,
      "batchUid": 3,
      "indexUid": "doggo",
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
      "indexUid": "[uuid]",
      "status": "succeeded",
      "type": "documentDeletion",
      "canceledBy": null,
@@ -321,8 +313,8 @@ async fn delete_document_by_filter() {

#[actix_rt::test]
async fn delete_document_by_complex_filter() {
    let server = Server::new().await;
    let index = server.index("doggo");
    let server = Server::new_shared();
    let index = server.unique_index();
    index.update_settings_filterable_attributes(json!(["color"])).await;
    let (task, _status_code) = index
        .add_documents(
@@ -343,22 +335,22 @@ async fn delete_document_by_complex_filter() {
        )
        .await;
    snapshot!(code, @"202 Accepted");
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 2,
      "indexUid": "doggo",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "documentDeletion",
      "enqueuedAt": "[date]"
    }
    "###);

    let response = index.wait_task(response.uid()).await;
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    let response = index.wait_task(response.uid()).await.succeeded();
    snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "uid": 2,
      "batchUid": 2,
      "indexUid": "doggo",
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
      "indexUid": "[uuid]",
      "status": "succeeded",
      "type": "documentDeletion",
      "canceledBy": null,
@@ -402,22 +394,22 @@ async fn delete_document_by_complex_filter() {
        .delete_document_by_filter(json!({ "filter": [["color = green", "color NOT EXISTS"]] }))
        .await;
    snapshot!(code, @"202 Accepted");
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "taskUid": 3,
      "indexUid": "doggo",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "documentDeletion",
      "enqueuedAt": "[date]"
    }
    "###);

    let response = index.wait_task(response.uid()).await;
    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    let response = index.wait_task(response.uid()).await.succeeded();
    snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
    {
      "uid": 3,
      "batchUid": 3,
      "indexUid": "doggo",
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
      "indexUid": "[uuid]",
      "status": "succeeded",
      "type": "documentDeletion",
      "canceledBy": null,

@@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() {

#[actix_rt::test]
async fn get_document_with_vectors() {
    let server = Server::new().await;
    let index = server.index("doggo");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (response, code) = index
        .update_settings(json!({

@@ -28,6 +28,7 @@ async fn error_delete_unexisting_index() {
    let (task, code) = index.delete_index_fail().await;

    assert_eq!(code, 202);
    index.wait_task(task.uid()).await.failed();

    let expected_response = json!({
        "message": "Index `DOES_NOT_EXISTS` not found.",
@@ -57,7 +58,7 @@ async fn loop_delete_add_documents() {
    }

    for task in tasks {
        let response = index.wait_task(task).await;
        let response = index.wait_task(task).await.succeeded();
        assert_eq!(response["status"], "succeeded", "{}", response);
    }
}

@@ -52,19 +52,28 @@ async fn no_index_return_empty_list() {

#[actix_rt::test]
async fn list_multiple_indexes() {
    let server = Server::new().await;
    server.index("test").create(None).await;
    let (task, _status_code) = server.index("test1").create(Some("key")).await;
    let server = Server::new_shared();

    server.index("test").wait_task(task.uid()).await.succeeded();
    let index_without_key = server.unique_index();
    let (response_without_key, _status_code) = index_without_key.create(None).await;

    let (response, code) = server.list_indexes(None, None).await;
    let index_with_key = server.unique_index();
    let (response_with_key, _status_code) = index_with_key.create(Some("key")).await;

    index_without_key.wait_task(response_without_key.uid()).await.succeeded();
    index_with_key.wait_task(response_with_key.uid()).await.succeeded();

    let (response, code) = server.list_indexes(None, Some(1000)).await;
    assert_eq!(code, 200);
    assert!(response["results"].is_array());
    let arr = response["results"].as_array().unwrap();
    assert_eq!(arr.len(), 2);
    assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
    assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
    assert!(arr.len() >= 2, "Expected at least 2 indexes.");
    assert!(arr
        .iter()
        .any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null));
    assert!(arr
        .iter()
        .any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key"));
}

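// Note on the pattern in the test above (not part of the diff): tests moved to
// Server::new_shared() reuse one server process, so each test takes a
// unique_index() and assertions avoid assuming a global index count — hence
// `arr.len() >= 2` instead of an exact equality.
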
#[actix_rt::test]

@@ -1,10 +1,11 @@
use crate::common::Server;
use crate::common::{shared_does_not_exists_index, Server};

use crate::json;

#[actix_rt::test]
async fn stats() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.create(Some("id")).await;

assert_eq!(code, 202);
@@ -15,7 +16,7 @@ async fn stats() {

assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 0);
assert!(response["isIndexing"] == false);
assert_eq!(response["isIndexing"], false);
assert!(response["fieldDistribution"].as_object().unwrap().is_empty());

let documents = json!([
@@ -31,7 +32,6 @@ async fn stats() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 1);

index.wait_task(response.uid()).await.succeeded();

@@ -39,7 +39,7 @@ async fn stats() {

assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 2);
assert!(response["isIndexing"] == false);
assert_eq!(response["isIndexing"], false);
assert_eq!(response["fieldDistribution"]["id"], 2);
assert_eq!(response["fieldDistribution"]["name"], 1);
assert_eq!(response["fieldDistribution"]["age"], 1);
@@ -47,11 +47,11 @@ async fn stats() {

#[actix_rt::test]
async fn error_get_stats_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.index("test").stats().await;
let index = shared_does_not_exists_index().await;
let (response, code) = index.stats().await;

let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

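Note: with uuid-named indexes the expected error payloads can no longer hard-code an index name, hence the literal-to-`format!` swaps in these hunks. A hedged sketch of the resulting comparison, assuming the error object sits at the top level of the response as in the surrounding tests:

let expected_response = json!({
    // The uid is generated per test, so the message is built dynamically.
    "message": format!("Index `{}` not found.", index.uid),
    "code": "index_not_found",
    "type": "invalid_request",
    "link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response, expected_response);
assert_eq!(code, 404);
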
@@ -2,28 +2,26 @@ use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::encoder::Encoder;
use crate::common::Server;
use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server};
use crate::json;

#[actix_rt::test]
async fn update_primary_key() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.create(None).await;

assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();

let (task, _status_code) = index.update(Some("primary")).await;

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();

let (response, code) = index.get().await;

assert_eq!(code, 200);

assert_eq!(response["uid"], "test");
assert_eq!(response["uid"], index.uid);
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());

@@ -39,24 +37,23 @@ async fn update_primary_key() {

#[actix_rt::test]
async fn create_and_update_with_different_encoding() {
let server = Server::new().await;
let index = server.index_with_encoder("test", Encoder::Gzip);
let (_, code) = index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index_with_encoder(Encoder::Gzip);
let (create_task, code) = index.create(None).await;

assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();

let index = server.index_with_encoder("test", Encoder::Brotli);
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
}

#[actix_rt::test]
async fn update_nothing() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task1, code) = index.create(None).await;

assert_eq!(code, 202);
@@ -67,35 +64,20 @@ async fn update_nothing() {

assert_eq!(code, 202);

let response = index.wait_task(task2.uid()).await;

assert_eq!(response["status"], "succeeded");
index.wait_task(task2.uid()).await.succeeded();
}

#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.create(Some("id")).await;
let index = shared_index_with_documents().await;

let (update_task, code) = index.update_index_fail(Some("primary")).await;

assert_eq!(code, 202);

let documents = json!([
{
"id": "11",
"content": "foobar"
}
]);
index.add_documents(documents, None).await;

let (task, code) = index.update(Some("primary")).await;

assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await;
let response = index.wait_task(update_task.uid()).await.failed();

let expected_response = json!({
"message": "Index `test`: Index already has a primary key: `id`.",
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
"code": "index_primary_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
@@ -106,15 +88,15 @@ async fn error_update_existing_primary_key() {

#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new().await;
let (task, code) = server.index("test").update(None).await;
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;

assert_eq!(code, 202);

let response = server.index("test").wait_task(task.uid()).await;
let response = index.wait_task(task.uid()).await.failed();

let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

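Note: mutating a shared fixture such as `shared_index_with_documents()` would poison the other tests that reuse it, which is presumably why the hunks above route doomed writes through helpers like `update_index_fail` and then assert with `.failed()`. A hedged sketch of that flow, using the helper names exactly as they appear above:

// The update is rejected asynchronously: the HTTP layer still answers
// 202 Accepted, and the failure only shows up on the task itself.
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
// `.failed()` asserts the final task status is "failed" and hands the
// task payload back so its `error` object can be inspected.
let response = index.wait_task(update_task.uid()).await.failed();
assert_eq!(response["error"]["code"], "index_primary_key_already_exists");
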
@@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str = "product_id";
/// testing: https://github.com/meilisearch/meilisearch/issues/4078
#[actix_rt::test]
async fn distinct_search_with_offset_no_ranking() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(response["estimatedTotalHits"] , @"11");

let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(response["estimatedTotalHits"], @"10");

let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"1");
snapshot!(format!("{:?}", hits), @r#"["345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");

let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
}

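Note: the `format!("{:?}", hits)` to `format!("{hits:?}")` churn in these hunks is purely cosmetic. Since Rust 2021, format strings can capture identifiers from the enclosing scope inline, and clippy's `uninlined_format_args` lint nudges code toward the captured form; the two spellings produce identical output:

let hits = vec!["123456", "789012"];
// Positional argument and inlined capture render the same Debug output.
assert_eq!(format!("{:?}", hits), format!("{hits:?}"));
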
/// testing: https://github.com/meilisearch/meilisearch/issues/4130
#[actix_rt::test]
async fn distinct_search_with_pagination_no_ranking() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["page"], @"0");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(response["page"], @"3");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["page"], @"4");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"2");
snapshot!(response["totalHits"], @"6");
@@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() {

#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new().await;
let index = server.index("tamo");
let server = Server::new_shared();
let index = server.unique_index();

let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await;
let task = index.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");

fn get_hits(response: &Value) -> Vec<String> {
@@ -299,7 +299,7 @@ async fn distinct_at_search_time() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"1");
snapshot!(response["totalHits"], @"3");

@@ -708,7 +708,7 @@ async fn filter_invalid_attribute_array() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -729,7 +729,7 @@ async fn filter_invalid_attribute_string() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -886,7 +886,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -912,7 +912,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -933,7 +933,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -959,7 +959,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -985,7 +985,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -1144,7 +1144,7 @@ async fn search_on_unknown_field() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@@ -1165,7 +1165,7 @@ async fn search_on_unknown_field_plus_joker() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@@ -1183,7 +1183,7 @@ async fn search_on_unknown_field_plus_joker() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@@ -1196,10 +1196,8 @@ async fn search_on_unknown_field_plus_joker() {

#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new().await;
let index = server.index("test");
let (task, _) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
@@ -1209,7 +1207,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@@ -1224,7 +1222,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@@ -1239,7 +1237,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"

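Note: because each test now owns a uuid-named index, the inline snapshots above can no longer pin `Index `test`` and instead expect `Index `[uuid]``; presumably the test harness rewrites the random uid before comparing. A hedged sketch of one way such a redaction can be done (using the `regex` crate; the repo's real helper may differ):

use regex::Regex;

// Replace any uuid-looking token with the stable placeholder used in the
// snapshots above, so error messages become deterministic.
fn redact_uuids(message: &str) -> String {
    let uuid = Regex::new(
        r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}",
    )
    .unwrap();
    uuid.replace_all(message, "[uuid]").into_owned()
}
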
@@ -50,13 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();

let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();

let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@@ -65,21 +63,18 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
server.wait_task(task.uid()).await.succeeded();

eprintln!("Settings -> Documents -> test");
let index = server.index("test");

let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();

let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();

let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@@ -88,14 +83,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
server.wait_task(task.uid()).await.succeeded();
}

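Note: the two-line `let response = ... .await;` plus `assert!(response.is_success(), ...)` pairs above collapse into a single chained `.succeeded()`. A hedged sketch of what such an assertion helper boils down to (the crate's real method lives on its own task wrapper type, not on `serde_json::Value`):

// Panics with the full task payload if the task did not succeed, and
// hands the payload back for further assertions.
fn assert_succeeded(task: serde_json::Value) -> serde_json::Value {
    assert_eq!(task["status"], "succeeded", "task did not succeed: {task}");
    task
}
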
#[actix_rt::test]
async fn simple_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -105,20 +99,20 @@ async fn simple_facet_search() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}

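Note: the facet-search payload used throughout this file has three knobs: `facetName` (required) selects the facet, `facetQuery` prefix-matches facet values, and an optional `q` runs a regular search first so only matching documents contribute to the counts. A small sketch of the request as these tests issue it:

let body = json!({
    "facetName": "genres",
    // Prefix match: "a" hits both "Action" and "Adventure".
    "facetQuery": "a",
    // Optional: restrict counting to documents matching this search.
    "q": ""
});
let (response, code) = index.facet_search(body).await;
assert_eq!(code, 200, "{response}");
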
#[actix_rt::test]
async fn simple_facet_search_on_movies() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = json!([
{
@@ -212,23 +206,23 @@ async fn simple_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###);
}

#[actix_rt::test]
async fn advanced_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -251,8 +245,8 @@ async fn advanced_facet_search() {

#[actix_rt::test]
async fn more_advanced_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -275,8 +269,8 @@ async fn more_advanced_facet_search() {

#[actix_rt::test]
async fn simple_facet_search_with_max_values() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
@@ -287,14 +281,14 @@ async fn simple_facet_search_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}

#[actix_rt::test]
async fn simple_facet_search_by_count_with_max_values() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index
@@ -309,14 +303,14 @@ async fn simple_facet_search_by_count_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}

#[actix_rt::test]
async fn non_filterable_facet_search_error() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@@ -324,17 +318,17 @@ async fn non_filterable_facet_search_error() {

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await;
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
}

#[actix_rt::test]
async fn facet_search_dont_support_words() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -344,14 +338,14 @@ async fn facet_search_dont_support_words() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 0);
}

#[actix_rt::test]
async fn simple_facet_search_with_sort_by_count() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
@@ -362,7 +356,7 @@ async fn simple_facet_search_with_sort_by_count() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
let hits = response["facetHits"].as_array().unwrap();
assert_eq!(hits.len(), 2);
assert_eq!(hits[0], json!({ "value": "Action", "count": 3 }));
@@ -371,25 +365,25 @@ async fn simple_facet_search_with_sort_by_count() {

#[actix_rt::test]
async fn add_documents_and_deactivate_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@@ -402,8 +396,8 @@ async fn add_documents_and_deactivate_facet_search() {

#[actix_rt::test]
async fn deactivate_facet_search_and_add_documents() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -411,16 +405,16 @@ async fn deactivate_facet_search_and_add_documents() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@@ -433,8 +427,8 @@ async fn deactivate_facet_search_and_add_documents() {

#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -442,31 +436,31 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
"facetSearch": true,
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -474,25 +468,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
.update_settings(json!({
"facetSearch": serde_json::Value::Null,
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
@@ -618,8 +612,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() {

#[actix_rt::test]
async fn distinct_facet_search_on_movies() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = json!([
{
@@ -925,26 +919,26 @@ async fn distinct_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;

// non-exhaustive facet count is counting 27 documents with the facet query "blob" but there are only 23 documents with a distinct color.
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###);

let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await;

// exhaustive facet count is counting 23 documents with the facet query "blob" which is the number of distinct colors.
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###);
}

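Note: as the comments in the hunk above explain, facet counts are estimates by default (27 for "Blob"), while `exhaustiveFacetCount: true` forces exact counting that respects the distinct attribute (23). The opt-in is per request:

let (response, code) = index
    .facet_search(json!({
        "facetName": "genres",
        "facetQuery": "blob",
        "q": "",
        // Trade speed for exactness: counts now deduplicate documents
        // sharing the same distinct attribute value instead of estimating.
        "exhaustiveFacetCount": true,
    }))
    .await;
assert_eq!(code, 200, "{response}");
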
@@ -4,23 +4,16 @@ use tempfile::TempDir;

use super::test_settings_documents_indexing_swapping_and_search;
use crate::{
common::{default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS},
common::{
default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
DOCUMENTS, NESTED_DOCUMENTS,
},
json,
};

#[actix_rt::test]
async fn search_with_filter_string_notation() {
let server = Server::new().await;
let index = server.index("test");

let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");

let documents = DOCUMENTS.clone();
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
let index = shared_index_with_documents().await;

index
.search(
@@ -28,44 +21,34 @@ async fn search_with_filter_string_notation() {
"filter": "title = Gläss"
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
},
)
.await;

let index = server.index("nested");
let nested_index = shared_index_with_nested_documents().await;

let (_, code) =
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");

let documents = NESTED_DOCUMENTS.clone();
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);

index
nested_index
.search(
json!({
"filter": "cattos = pésti"
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["hits"][0]["id"], json!(852));
},
)
.await;

index
nested_index
.search(
json!({
"filter": "doggos.age > 5"
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
assert_eq!(response["hits"][0]["id"], json!(654));
assert_eq!(response["hits"][1]["id"], json!(951));
@@ -82,7 +65,7 @@ async fn search_with_filter_array_notation() {
"filter": ["title = Gläss"]
}))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);

let (response, code) = index
@@ -90,7 +73,7 @@ async fn search_with_filter_array_notation() {
"filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]]
}))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
}

@@ -116,7 +99,7 @@ async fn search_with_contains_filter() {
"filter": "title CONTAINS cap"
}))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
}

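Note: the hunks above exercise Meilisearch's equivalent filter notations: a raw expression string, an array whose elements are combined with AND, and nested inner arrays whose elements are combined with OR before the outer AND. A sketch of the three shapes (the `rating` clause is illustrative, not from the fixture):

// One expression string.
json!({ "filter": "title = Gläss AND rating > 4" });
// Outer array: elements are ANDed together.
json!({ "filter": ["title = Gläss", "rating > 4"] });
// Inner arrays are ORed, then ANDed at the outer level.
json!({ "filter": [["title = Gläss", "title = \"Shazam!\""], "rating > 4"] });
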
@@ -269,16 +252,14 @@ async fn search_with_pattern_filter_settings() {

#[actix_rt::test]
async fn search_with_pattern_filter_settings_scenario_1() {
let temp = TempDir::new().unwrap();
let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap();
let server = Server::new_shared();

eprintln!("Documents -> Settings -> test");
let index = server.index("test");
let index = server.unique_index();

let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();

let (task, code) = index
.update_settings(json!({"filterableAttributes": [{
@@ -289,9 +270,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter works
index
@@ -335,7 +315,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -355,9 +335,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter works
index
@@ -467,9 +446,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter returns an error
index
@@ -481,7 +459,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -567,9 +545,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();

// Check if the Equality filter works
index
@@ -613,7 +590,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -720,7 +697,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -746,7 +723,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"message": "Index `[uuid]`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

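Note: `search_with_pattern_filter_settings_scenario_1` keeps re-posting rule-based `filterableAttributes` with different feature flags, and the error snapshots above are what a filter gets when it uses an operator the matched rule disabled. The shape of one such rule, as these tests and the hint messages ("modifying the features.filter object") describe it:

json!({
    "filterableAttributes": [{
        // Which fields the rule applies to.
        "attributePatterns": ["doggos.age"],
        "features": {
            "facetSearch": false,
            // Allow `=` / `!=` / `IN` but reject `<`, `>`, `TO`, ...
            "filter": { "equality": true, "comparison": false }
        }
    }]
});
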
@@ -1,56 +1,14 @@
use meili_snap::{json_string, snapshot};
use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME;
use once_cell::sync::Lazy;

use crate::common::{Server, Value};
use crate::common::shared_index_with_geo_documents;
use crate::json;

use super::test_settings_documents_indexing_swapping_and_search;

static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"id": 1,
"name": "Taco Truck",
"address": "444 Salsa Street, Burritoville",
"type": "Mexican",
"rating": 9,
"_geo": {
"lat": 34.0522,
"lng": -118.2437
}
},
{
"id": 2,
"name": "La Bella Italia",
"address": "456 Elm Street, Townsville",
"type": "Italian",
"rating": 9,
"_geo": {
"lat": "45.4777599",
"lng": "9.1967508"
}
},
{
"id": 3,
"name": "Crêpe Truck",
"address": "2 Billig Avenue, Rouenville",
"type": "French",
"rating": 10
}
])
});

#[actix_rt::test]
async fn geo_sort_with_geo_strings() {
let server = Server::new().await;
let index = server.index("test");

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = shared_index_with_geo_documents().await;

index
.search(
@@ -59,7 +17,7 @@ async fn geo_sort_with_geo_strings() {
"sort": ["_geoPoint(0.0, 0.0):asc"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
},
)
.await;
@@ -67,14 +25,7 @@ async fn geo_sort_with_geo_strings() {

#[actix_rt::test]
async fn geo_bounding_box_with_string_and_number() {
let server = Server::new().await;
let index = server.index("test");

let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.add_documents(documents, None).await;
index.wait_task(ret.uid()).await.succeeded();
let index = shared_index_with_geo_documents().await;

index
.search(
@@ -82,7 +33,7 @@ async fn geo_bounding_box_with_string_and_number() {
"filter": "_geoBoundingBox([89, 179], [-89, -179])",
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@@ -124,14 +75,7 @@ async fn geo_bounding_box_with_string_and_number() {
#[actix_rt::test]
async fn bug_4640() {
// https://github.com/meilisearch/meilisearch/issues/4640
let server = Server::new().await;
let index = server.index("test");

let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.update_settings_filterable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
index.wait_task(ret.uid()).await.succeeded();
let index = shared_index_with_geo_documents().await;

// Sort the document with the second one first
index
@@ -140,7 +84,7 @@ async fn bug_4640() {
"sort": ["_geoPoint(45.4777599, 9.1967508):asc"],
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@@ -203,7 +147,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "jean"}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@@ -248,7 +192,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "bob"}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@@ -285,7 +229,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "intel"}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@@ -325,7 +269,7 @@ async fn geo_sort_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "sort"], "sortableAttributes": [RESERVED_GEO_FIELD_NAME]}),
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

@@ -2,31 +2,31 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;

use crate::common::index::Index;
-use crate::common::{Server, Value};
+use crate::common::{Server, Shared, Value};
use crate::json;

async fn index_with_documents_user_provided<'a>(
-server: &'a Server,
+server: &'a Server<Shared>,
documents: &Value,
) -> Index<'a> {
-let index = server.index("test");
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
"source": "userProvided",
"dimensions": 2}}} ))
.await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index.add_documents(documents.clone(), None).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
index
}

-async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
-let index = server.index("test");
+async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
@@ -36,11 +36,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> I
"documentTemplate": "{{doc.title}}, {{doc.desc}}"
}}} ))
.await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index.add_documents(documents.clone(), None).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
index
}
@@ -76,6 +76,48 @@ static SINGLE_DOCUMENT_VEC: Lazy<Value> = Lazy::new(|| {
}])
});

+static TEST_DISTINCT_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+// for query "Captain Marvel" and vector [1.0, 1.0]
+json!([
+{
+"id": 0,
+"search": "Captain Planet",
+"desc": "#2 for keyword search, #3 for hybrid search",
+"_vectors": {
+"default": [-1.0, 0.0],
+},
+"distinct": 0
+},
+{
+"id": 1,
+"search": "Captain Marvel",
+"desc": "#1 for keyword search, #4 for hybrid search",
+"_vectors": {
+"default": [-1.0, -1.0],
+},
+"distinct": 1
+},
+{
+"id": 2,
+"search": "Some Captain at least",
+"desc": "#3 for keyword search, #1 for hybrid search",
+"_vectors": {
+"default": [1.0, 1.0],
+},
+"distinct": 0
+},
+{
+"id": 3,
+"search": "Irrelevant Capitaine",
+"desc": "#4 for keyword search, #2 for hybrid search",
+"_vectors": {
+"default": [1.0, 0.0],
+},
+"distinct": 1
+},
+])
+});
+
static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@@ -97,8 +139,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(
@@ -130,8 +172,8 @@ async fn simple_search() {

#[actix_rt::test]
async fn limit_offset() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(
@@ -143,8 +185,8 @@ async fn limit_offset() {
snapshot!(response["semanticHitCount"], @"0");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);

-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(
@@ -159,8 +201,8 @@ async fn limit_offset() {

#[actix_rt::test]
async fn simple_search_hf() {
-let server = Server::new().await;
-let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;

let (response, code) = index
.search_post(
@@ -211,8 +253,8 @@ async fn simple_search_hf() {

#[actix_rt::test]
async fn distribution_shift() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true});
let (response, code) = index.search_post(search.clone()).await;
@@ -233,7 +275,7 @@ async fn distribution_shift() {
.await;

snapshot!(code, @"202 Accepted");
-let response = server.wait_task(response.uid()).await;
+let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#);

let (response, code) = index.search_post(search).await;
@@ -243,8 +285,8 @@ async fn distribution_shift() {

#[actix_rt::test]
async fn highlighter() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
@@ -298,8 +340,8 @@ async fn highlighter() {

#[actix_rt::test]
async fn invalid_semantic_ratio() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

let (response, code) = index
.search_post(
@@ -370,8 +412,8 @@ async fn invalid_semantic_ratio() {

#[actix_rt::test]
async fn single_document() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SINGLE_DOCUMENT_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SINGLE_DOCUMENT_VEC).await;

let (response, code) = index
.search_post(
@@ -386,8 +428,8 @@ async fn single_document() {

#[actix_rt::test]
async fn query_combination() {
-let server = Server::new().await;
-let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;

// search without query and vector, but with hybrid => still placeholder
let (response, code) = index
@@ -493,10 +535,54 @@ async fn query_combination() {
snapshot!(response["semanticHitCount"], @"0");
}

+// see <https://github.com/meilisearch/meilisearch/issues/5526>
+#[actix_rt::test]
+async fn distinct_is_applied() {
+let server = Server::new_shared();
+let index = index_with_documents_user_provided(server, &TEST_DISTINCT_DOCUMENTS).await;
+
+let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
+assert_eq!(202, code, "{:?}", response);
+index.wait_task(response.uid()).await.succeeded();
+
+// pure keyword
+let (response, code) = index
+.search_post(
+json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}),
+)
+.await;
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###);
+snapshot!(response["semanticHitCount"], @"null");
+snapshot!(response["estimatedTotalHits"], @"2");
+
+// pure semantic
+let (response, code) = index
+.search_post(
+json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}),
+)
+.await;
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###);
+snapshot!(response["semanticHitCount"], @"2");
+snapshot!(response["estimatedTotalHits"], @"2");
+
+// hybrid
+let (response, code) = index
+.search_post(
+json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}),
+)
+.await;
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###);
+snapshot!(response["semanticHitCount"], @"1");
+snapshot!(response["estimatedTotalHits"], @"2");
+}
+
#[actix_rt::test]
async fn retrieve_vectors() {
-let server = Server::new().await;
-let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;

let (response, code) = index
.search_post(
@@ -546,7 +632,7 @@ async fn retrieve_vectors() {
let (response, code) = index
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
@@ -596,7 +682,7 @@ async fn retrieve_vectors() {
// remove `_vectors` from displayed attributes
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index
@@ -89,9 +89,9 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(
@@ -196,9 +196,9 @@ async fn simple_search() {

#[actix_rt::test]
async fn force_locales() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@@ -211,10 +211,10 @@ async fn force_locales() {
}),
)
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -274,9 +274,9 @@ async fn force_locales() {

#[actix_rt::test]
async fn force_locales_with_pattern() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@@ -289,10 +289,10 @@ async fn force_locales_with_pattern() {
}),
)
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -352,9 +352,9 @@ async fn force_locales_with_pattern() {

#[actix_rt::test]
async fn force_locales_with_pattern_nested() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@@ -365,10 +365,10 @@ async fn force_locales_with_pattern_nested() {
]
}))
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -423,9 +423,9 @@ async fn force_locales_with_pattern_nested() {
}
#[actix_rt::test]
async fn force_different_locales_with_pattern() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@@ -440,10 +440,10 @@ async fn force_different_locales_with_pattern() {
}),
)
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -499,9 +499,9 @@ async fn force_different_locales_with_pattern() {

#[actix_rt::test]
async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@@ -518,10 +518,10 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}),
)
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -577,9 +577,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {

#[actix_rt::test]
async fn auto_infer_locales_at_search() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@@ -592,10 +592,10 @@ async fn auto_infer_locales_at_search() {
}),
)
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -676,9 +676,9 @@ async fn auto_infer_locales_at_search() {

#[actix_rt::test]
async fn force_different_locales_with_pattern_nested() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@@ -691,10 +691,10 @@ async fn force_different_locales_with_pattern_nested() {
]
}))
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -774,9 +774,9 @@ async fn force_different_locales_with_pattern_nested() {

#[actix_rt::test]
async fn settings_change() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -789,10 +789,10 @@ async fn settings_change() {
]
}))
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 1,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -852,10 +852,10 @@ async fn settings_change() {
]
}))
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 2,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -906,9 +906,9 @@ async fn settings_change() {

#[actix_rt::test]
async fn invalid_locales() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(
@@ -945,9 +945,9 @@ async fn invalid_locales() {

#[actix_rt::test]
async fn invalid_localized_attributes_rules() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let (response, _) = index
.update_settings(json!({
"localizedAttributes": [
@@ -1015,19 +1015,19 @@ async fn invalid_localized_attributes_rules() {

#[actix_rt::test]
async fn simple_facet_search() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
"filterableAttributes": ["name_en", "name_ja", "name_zh"],
}))
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -1073,9 +1073,9 @@ async fn simple_facet_search() {

#[actix_rt::test]
async fn facet_search_with_localized_attributes() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@@ -1086,10 +1086,10 @@ async fn facet_search_with_localized_attributes() {
]
}))
.await;
-snapshot!(response, @r###"
+snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
-"taskUid": 0,
-"indexUid": "test",
+"taskUid": "[task_uid]",
+"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@@ -1146,9 +1146,9 @@ async fn facet_search_with_localized_attributes() {

#[actix_rt::test]
async fn swedish_search() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = json!([
{"id": "tra1-1", "product": "trä"},
{"id": "tra2-1", "product": "traktor"},
@@ -1269,9 +1269,9 @@ async fn swedish_search() {

#[actix_rt::test]
async fn german_search() {
-let server = Server::new().await;
+let server = Server::new_shared();
+let index = server.unique_index();

-let index = server.index("test");
let documents = json!([
{"id": 1, "product": "Interkulturalität"},
{"id": 2, "product": "Wissensorganisation"},
@@ -2,11 +2,11 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;

use crate::common::index::Index;
-use crate::common::{Server, Value};
+use crate::common::{Server, Shared, Value};
use crate::json;

-async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
-let index = server.index("test");
+async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
+let index = server.unique_index();

let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

index
.search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@@ -75,8 +75,8 @@ async fn simple_search() {

#[actix_rt::test]
async fn search_with_typo() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

index
.search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@@ -102,8 +102,8 @@ async fn search_with_typo() {

#[actix_rt::test]
async fn search_with_unknown_word() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

index
.search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
File diff suppressed because it is too large
@@ -2296,6 +2296,7 @@ async fn error_remote_500_once() {
}

#[actix_rt::test]
+#[ignore]
async fn error_remote_timeout() {
let ms0 = Server::new().await;
let ms1 = Server::new().await;
@@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() {
let index = shared_index_with_documents().await;
index
.search(json!({}), |response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert!(response.get("estimatedTotalHits").is_some());
assert!(response.get("limit").is_some());
assert!(response.get("offset").is_some());
@@ -25,7 +25,7 @@ async fn simple_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"page": 1}), |response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
assert!(response.get("totalHits").is_some());
assert_eq!(response["page"], 1);
@@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() {
let index = shared_index_with_documents().await;
index
.search(json!({"page": 0}), |response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
assert!(response.get("totalHits").is_some());
assert_eq!(response["page"], 0);
@@ -58,7 +58,7 @@ async fn hits_per_page_1() {
let index = shared_index_with_documents().await;
index
.search(json!({"hitsPerPage": 1}), |response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["totalHits"], 5);
assert_eq!(response["page"], 1);
@@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() {
let index = shared_index_with_documents().await;
index
.search(json!({"hitsPerPage": 0}), |response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
assert_eq!(response["totalHits"], 5);
assert_eq!(response["page"], 1);
@@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() {
for page in 0..=4 {
index
.search(json!({"page": page, "hitsPerPage": 1}), |response, code| {
-assert_eq!(code, 200, "{}", response);
+assert_eq!(code, 200, "{response}");
assert_eq!(response["totalHits"], 4);
assert_eq!(response["totalPages"], 4);
})
@@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;

use crate::common::index::Index;
-use crate::common::{Server, Value};
+use crate::common::{Server, Shared, Value};
use crate::json;

-async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
-let index = server.index("test");
+async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
+let index = server.unique_index();

let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search_on_title() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

// simple search should return 2 documents (ids: 2 and 3).
index
@@ -51,8 +51,8 @@ async fn simple_search_on_title() {

#[actix_rt::test]
async fn search_no_searchable_attribute_set() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

index
.search(
@@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() {

#[actix_rt::test]
async fn search_on_all_attributes() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@@ -106,8 +106,8 @@ async fn search_on_all_attributes() {

#[actix_rt::test]
async fn search_on_all_attributes_restricted_set() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();

@@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() {

#[actix_rt::test]
async fn simple_prefix_search_on_title() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

// simple search should return 2 documents (ids: 2 and 3).
index
@@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() {

#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search matching strategy all should only return 1 document (ids: 2).
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| {
@@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() {

#[actix_rt::test]
async fn simple_search_on_no_field() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search on no field shouldn't return any document.
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| {
@@ -161,8 +161,8 @@ async fn simple_search_on_no_field() {

#[actix_rt::test]
async fn word_ranking_rule_order() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

// Document 3 should appear before document 2.
index
@@ -189,8 +189,8 @@ async fn word_ranking_rule_order() {

#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
-let server = Server::new().await;
-let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+let server = Server::new_shared();
+let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
@@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() {

#[actix_rt::test]
async fn typo_ranking_rule_order() {
-let server = Server::new().await;
+let server = Server::new_shared();
let index = index_with_documents(
-&server,
+server,
&json!([
{
"title": "Capitain Marivel",
@@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() {

#[actix_rt::test]
async fn attributes_ranking_rule_order() {
-let server = Server::new().await;
+let server = Server::new_shared();
let index = index_with_documents(
-&server,
+server,
&json!([
{
"title": "Captain Marvel",
@@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() {

#[actix_rt::test]
async fn exactness_ranking_rule_order() {
-let server = Server::new().await;
+let server = Server::new_shared();
let index = index_with_documents(
-&server,
+server,
&json!([
{
"title": "Captain Marvel",
@@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() {

#[actix_rt::test]
async fn search_on_exact_field() {
-let server = Server::new().await;
+let server = Server::new_shared();
let index = index_with_documents(
-&server,
+server,
&json!([
{
"title": "Captain Marvel",
@@ -359,7 +359,7 @@ async fn search_on_exact_field() {

let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
-assert_eq!(202, code, "{:?}", response);
+assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
@@ -372,7 +372,7 @@

#[actix_rt::test]
async fn phrase_search_on_title() {
-let server = Server::new().await;
+let server = Server::new_shared();
let documents = json!([
{ "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" },
{ "id": 5, "desc": "Document Review", "title": "Document Review Attorney" },
@@ -383,7 +383,7 @@ async fn phrase_search_on_title() {
{ "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" },
{ "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" }
]);
-let index = index_with_documents(&server, &documents).await;
+let index = index_with_documents(server, &documents).await;

index
.search(
@@ -416,3 +416,381 @@ async fn phrase_search_on_title() {
)
.await;
}
+
+static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+json!([
+{
+"details": {
+"title": "Shazam!",
+"desc": "a Captain Marvel ersatz",
+"weaknesses": ["magic", "requires transformation"],
+"outfit": {
+"has_cape": true,
+"colors": {
+"primary": "red",
+"secondary": "gold"
+}
+}
+},
+"id": "1",
+},
+{
+"details": {
+"title": "Captain Planet",
+"desc": "He's not part of the Marvel Cinematic Universe",
+"blue_skin": true,
+"outfit": {
+"has_cape": false
+}
+},
+"id": "2",
+},
+{
+"details": {
+"title": "Captain Marvel",
+"desc": "a Shazam ersatz",
+"weaknesses": ["magic", "power instability"],
+"outfit": {
+"has_cape": false
+}
+},
+"id": "3",
+}])
+});
+
+#[actix_rt::test]
+async fn nested_search_on_title_with_prefix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Wildcard should match to 'details.' attribute
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_search_with_suffix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Wildcard should match to any attribute inside 'details.'
+// It's worth noting the difference between 'details.*' and '*.title'
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "1"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+
+// Should return 1 document (ids: 1)
+index
+.search(
+json!({"q": "gold", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "1"
+}
+]"###);
+},
+)
+.await;
+
+// Should return 2 documents (ids: 1 and 2)
+index
+.search(
+json!({"q": "true", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "1"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+let (task, _status_code) =
+index.update_settings_searchable_attributes(json!(["details.title"])).await;
+index.wait_task(task.uid()).await.succeeded();
+
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"].as_array().unwrap().len(), @"0");
+},
+)
+.await;
+
+let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
+index.wait_task(task.uid()).await.succeeded();
+
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(response["hits"].as_array().unwrap().len(), @"0");
+},
+)
+.await;
+
+let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
+index.wait_task(task.uid()).await.succeeded();
+
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_prefix_search_on_title_with_prefix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3).
+index
+.search(
+json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_prefix_search_on_details_with_suffix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+index
+.search(
+json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "1"
+},
+{
+"id": "2"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3)
+index
+.search(
+json!({"q": "mag", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "1"
+},
+{
+"id": "3"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_search_on_title_matching_strategy_all() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Nested search matching strategy all should only return 1 document (ids: 3)
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all", "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+}
+]"###);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Document 3 should appear before documents 1 and 2
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.desc", "*.title"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "1"
+},
+{
+"id": "2"
+}
+]
+"###
+);
+},
+)
+.await;
+}
+
+#[actix_rt::test]
+async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() {
+let server = Server::new_shared();
+let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
+
+// Document 3 should appear before documents 1 and 2
+index
+.search(
+json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]),
+@r###"
+[
+{
+"id": "3"
+},
+{
+"id": "1"
+},
+{
+"id": "2"
+}
+]
+"###
+);
+},
+)
+.await;
+}
@@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
-"indexUid": "tamo",
+"indexUid": "[uuid]",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,
@@ -3,8 +3,8 @@ use crate::json;

#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
@@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() {

#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
-let server = Server::new().await;
-let index = server.index("test");
+let server = Server::new_shared();
+let index = server.unique_index();

let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();
@@ -11,59 +11,62 @@ macro_rules! test_setting_routes {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_unexisting_index() {
|
||||
let server = Server::new().await;
|
||||
let url = format!("/indexes/test/settings/{}",
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (_response, code) = server.service.get(url).await;
|
||||
assert_eq!(code, 404);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn update_unexisting_index() {
|
||||
let server = Server::new().await;
|
||||
let url = format!("/indexes/test/settings/{}",
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
|
||||
assert_eq!(code, 202, "{}", response);
|
||||
server.index("").wait_task(0).await;
|
||||
let (response, code) = server.index("test").get().await;
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn delete_unexisting_index() {
|
||||
let server = Server::new().await;
|
||||
let url = format!("/indexes/test/settings/{}",
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (_, code) = server.service.delete(url).await;
|
||||
assert_eq!(code, 202);
|
||||
let response = server.index("").wait_task(0).await;
|
||||
assert_eq!(response["status"], "failed");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_default() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let (response, code) = index.create(None).await;
|
||||
assert_eq!(code, 202, "{}", response);
|
||||
index.wait_task(0).await;
|
||||
let url = format!("/indexes/test/settings/{}",
|
||||
let server = Server::new_shared();
|
||||
let index_name = uuid::Uuid::new_v4().to_string();
|
||||
let url = format!("/indexes/{index_name}/settings/{}",
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (response, code) = server.service.get(url).await;
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(code, 404, "{response}");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn update_unexisting_index() {
|
||||
let server = Server::new_shared();
|
||||
let index_name = uuid::Uuid::new_v4().to_string();
|
||||
let url = format!("/indexes/{index_name}/settings/{}",
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
|
||||
assert_eq!(code, 202, "{response}");
|
||||
let (response, code) = server.service.get(format!("/indixes/{index_name}")).await;
|
||||
assert_eq!(code, 404, "{response}");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn delete_unexisting_index() {
|
||||
let server = Server::new_shared();
|
||||
let index_name = uuid::Uuid::new_v4().to_string();
|
||||
let url = format!("/indexes/{index_name}/settings/{}",
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (response, code) = server.service.delete(url).await;
|
||||
assert_eq!(code, 202, "{response}");
|
||||
let (response, code) = server.service.get(format!("/indixes/{index_name}")).await;
|
||||
assert_eq!(code, 404, "{response}");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_default() {
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
let (response, code) = index.create(None).await;
|
||||
assert_eq!(code, 202, "{response}");
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
let url = format!("/indexes/{}/settings/{}",
|
||||
index.uid,
|
||||
stringify!($setting)
|
||||
.chars()
|
||||
.map(|c| if c == '_' { '-' } else { c })
|
||||
.collect::<String>());
|
||||
let (response, code) = server.service.get(url).await;
|
||||
assert_eq!(code, 200, "{response}");
|
||||
let expected = crate::json!($default_value);
|
||||
assert_eq!(expected, response);
|
||||
}
|
||||
@@ -185,15 +188,16 @@ test_setting_routes!(
 
 #[actix_rt::test]
 async fn get_settings_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").settings().await;
-    assert_eq!(code, 404, "{}", response)
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (response, code) = index.settings().await;
+    assert_eq!(code, 404, "{response}")
 }
 
 #[actix_rt::test]
 async fn get_settings() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (response, _code) = index.create(None).await;
     index.wait_task(response.uid()).await.succeeded();
     let (response, code) = index.settings().await;
@@ -237,9 +241,8 @@ async fn get_settings() {
 
 #[actix_rt::test]
 async fn secrets_are_hidden_in_settings() {
-    let server = Server::new().await;
-
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (response, _code) = index.create(None).await;
     index.wait_task(response.uid()).await.succeeded();
 
@@ -259,11 +262,11 @@ async fn secrets_are_hidden_in_settings() {
         .await;
     meili_snap::snapshot!(code, @"202 Accepted");
 
-    meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
+    meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
     @r###"
     {
-      "taskUid": 1,
-      "indexUid": "test",
+      "taskUid": "[task_uid]",
+      "indexUid": "[uuid]",
       "status": "enqueued",
       "type": "settingsUpdate",
       "enqueuedAt": "[date]"
@@ -272,7 +275,7 @@ async fn secrets_are_hidden_in_settings() {
 
     let settings_update_uid = response.uid();
 
-    index.wait_task(settings_update_uid).await;
+    index.wait_task(settings_update_uid).await.succeeded();
 
     let (response, code) = index.settings().await;
     meili_snap::snapshot!(code, @"200 OK");
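// The redaction map shown above is the meili_snap pattern for non-deterministic fields:
// each `".path" => "[placeholder]"` entry replaces a volatile value (task uid, dates,
// durations) before the snapshot comparison, so the stored snapshot stays stable across
// runs and across shared-server test interleavings. A hedged sketch of the same call
// shape, using only redaction keys that appear in this diff:
// meili_snap::snapshot!(meili_snap::json_string!(response,
//     { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }),
//     @r###"{ "taskUid": "[task_uid]", "enqueuedAt": "[date]" }"###);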
@@ -360,16 +363,16 @@ async fn secrets_are_hidden_in_settings() {
 
 #[actix_rt::test]
 async fn error_update_settings_unknown_field() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (_response, code) = index.update_settings(json!({"foo": 12})).await;
     assert_eq!(code, 400);
 }
 
 #[actix_rt::test]
 async fn test_partial_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
     index.wait_task(task.uid()).await.succeeded();
     let (response, code) = index.settings().await;
@@ -388,20 +391,18 @@ async fn test_partial_update() {
 
 #[actix_rt::test]
 async fn error_delete_settings_unexisting_index() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, code) = index.delete_settings().await;
     assert_eq!(code, 202);
 
-    let response = index.wait_task(task.uid()).await;
-
-    assert_eq!(response["status"], "failed");
+    index.wait_task(task.uid()).await.failed();
 }
 
 #[actix_rt::test]
 async fn reset_all_settings() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let documents = json!([
         {
@@ -413,7 +414,6 @@ async fn reset_all_settings() {
 
     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["taskUid"], 0);
     index.wait_task(response.uid()).await.succeeded();
 
     let (update_task,_status_code) = index
@@ -446,17 +446,15 @@ async fn reset_all_settings() {
 
 #[actix_rt::test]
 async fn update_setting_unexisting_index() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, code) = index.update_settings(json!({})).await;
     assert_eq!(code, 202);
-    let response = index.wait_task(task.uid()).await;
-    assert_eq!(response["status"], "succeeded");
+    index.wait_task(task.uid()).await.succeeded();
     let (_response, code) = index.get().await;
     assert_eq!(code, 200);
     let (task, _status_code) = index.delete_settings().await;
-    let response = index.wait_task(task.uid()).await;
-    assert_eq!(response["status"], "succeeded");
+    index.wait_task(task.uid()).await.succeeded();
 }
 
 #[actix_rt::test]
@@ -477,8 +475,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
 
 #[actix_rt::test]
 async fn error_set_invalid_ranking_rules() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     index.create(None).await;
 
     let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
@@ -495,8 +493,8 @@ async fn error_set_invalid_ranking_rules() {
 
 #[actix_rt::test]
 async fn set_and_reset_distinct_attribute_with_dedicated_route() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _code) = index.update_distinct_attribute(json!("test")).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -516,8 +514,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
 
 #[actix_rt::test]
 async fn granular_filterable_attributes() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     index.create(None).await;
 
     let (response, code) =
@@ -535,7 +533,7 @@ async fn granular_filterable_attributes() {
     index.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.settings().await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     snapshot!(json_string!(response["filterableAttributes"]), @r###"
     [
       {
 
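// The recurring rewrite in this file replaces manual status checks with the harness's
// asserting waiters. A sketch of the two equivalent forms, assuming `wait_task` returns
// a response that also exposes the `.succeeded()` / `.failed()` assertion helpers seen
// throughout this diff:
//
//     // before: wait, then assert on the raw JSON
//     let response = index.wait_task(task.uid()).await;
//     assert_eq!(response["status"], "succeeded");
//
//     // after: the helper waits and asserts in one step
//     index.wait_task(task.uid()).await.succeeded();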
@@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {
 
 #[actix_rt::test]
 async fn attribute_scale_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -38,7 +38,7 @@ async fn attribute_scale_search() {
         "rankingRules": ["words", "typo", "proximity"],
     }))
     .await;
-    assert_eq!("202", code.as_str(), "{:?}", response);
+    assert_eq!("202", code.as_str(), "{response:?}");
     index.wait_task(response.uid()).await.succeeded();
 
     // the expected order is [1, 3, 2] instead of [3, 1, 2]
@@ -99,8 +99,8 @@ async fn attribute_scale_search() {
 
 #[actix_rt::test]
 async fn attribute_scale_phrase_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -167,8 +167,8 @@ async fn attribute_scale_phrase_search() {
 
 #[actix_rt::test]
 async fn word_scale_set_and_reset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -282,8 +282,8 @@ async fn word_scale_set_and_reset() {
 
 #[actix_rt::test]
 async fn attribute_scale_default_ranking_rules() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
     index.wait_task(task.uid()).await.succeeded();
@@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() {
         "proximityPrecision": "byAttribute"
     }))
     .await;
-    assert_eq!("202", code.as_str(), "{:?}", response);
+    assert_eq!("202", code.as_str(), "{response:?}");
     index.wait_task(response.uid()).await.succeeded();
 
     // the expected order is [3, 1, 2]
 
@@ -5,8 +5,8 @@ use crate::json;
 
 #[actix_rt::test]
 async fn set_and_reset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _code) = index
         .update_settings(json!({
@@ -70,8 +70,8 @@ async fn set_and_search() {
         },
     ]);
 
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (add_task, _status_code) = index.add_documents(documents, None).await;
     index.wait_task(add_task.uid()).await.succeeded();
@@ -224,8 +224,8 @@ async fn advanced_synergies() {
         },
     ]);
 
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (add_task, _status_code) = index.add_documents(documents, None).await;
     index.wait_task(add_task.uid()).await.succeeded();
 
@@ -6,11 +6,11 @@ use crate::json;
 
 #[actix_rt::test]
 async fn similar_unexisting_index() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let expected_response = json!({
-        "message": "Index `test` not found.",
+        "message": format!("Index `{}` not found.", index.uid),
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -26,12 +26,12 @@ async fn similar_unexisting_index() {
 
 #[actix_rt::test]
 async fn similar_unexisting_parameter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     index
         .similar(json!({"id": 287947, "marin": "hello"}), |response, code| {
-            assert_eq!(code, 400, "{}", response);
+            assert_eq!(code, 400, "{response}");
             assert_eq!(response["code"], "bad_request");
         })
         .await;
@@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() {
 
 #[actix_rt::test]
 async fn similar_bad_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -53,7 +53,7 @@ async fn similar_bad_id() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await;
     snapshot!(code, @"400 Bad Request");
@@ -69,8 +69,8 @@ async fn similar_bad_id() {
 
 #[actix_rt::test]
 async fn similar_bad_ranking_score_threshold() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await;
     snapshot!(code, @"400 Bad Request");
@@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() {
 
 #[actix_rt::test]
 async fn similar_invalid_ranking_score_threshold() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await;
     snapshot!(code, @"400 Bad Request");
@@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() {
 
 #[actix_rt::test]
 async fn similar_invalid_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -143,7 +143,7 @@ async fn similar_invalid_id() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await;
@@ -160,8 +160,8 @@ async fn similar_invalid_id() {
 
 #[actix_rt::test]
 async fn similar_not_found_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -174,7 +174,7 @@ async fn similar_not_found_id() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await;
@@ -191,8 +191,8 @@ async fn similar_not_found_id() {
 
 #[actix_rt::test]
 async fn similar_bad_offset() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -205,7 +205,7 @@ async fn similar_bad_offset() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await;
@@ -233,8 +233,8 @@ async fn similar_bad_offset() {
 
 #[actix_rt::test]
 async fn similar_bad_limit() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -247,7 +247,7 @@ async fn similar_bad_limit() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let (response, code) =
         index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await;
@@ -277,8 +277,8 @@ async fn similar_bad_limit() {
 async fn similar_bad_filter() {
     // Since a filter is deserialized as a json Value it will never fail to deserialize.
     // Thus the error message is not generated by deserr but written by us.
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -291,7 +291,7 @@ async fn similar_bad_filter() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     snapshot!(code, @"202 Accepted");
 
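// With unique, uuid-named indexes, the expected error payloads in this file can no
// longer hard-code `Index `test` not found.`; they are built from the index uid instead,
// as in the first hunk above. A sketch of the pattern:
// let expected_response = json!({
//     "message": format!("Index `{}` not found.", index.uid),
//     "code": "index_not_found",
// });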
@@ -316,8 +316,8 @@ async fn similar_bad_filter() {
 
 #[actix_rt::test]
 async fn filter_invalid_syntax_object() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() {
 
 #[actix_rt::test]
 async fn filter_invalid_syntax_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() {
 
 #[actix_rt::test]
 async fn filter_invalid_syntax_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() {
 
 #[actix_rt::test]
 async fn filter_invalid_attribute_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() {
 
 #[actix_rt::test]
 async fn filter_invalid_attribute_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_attribute_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_attribute_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() {
 
 #[actix_rt::test]
 async fn filter_reserved_attribute_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() {
 
 #[actix_rt::test]
 async fn filter_reserved_attribute_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_point_array() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() {
 
 #[actix_rt::test]
 async fn filter_reserved_geo_point_string() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
@@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() {
 
 #[actix_rt::test]
 async fn similar_bad_retrieve_vectors() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) =
         index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await;
@@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() {
 
 #[actix_rt::test]
 async fn similar_bad_embedder() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -820,7 +820,7 @@ async fn similar_bad_embedder() {
         "filterableAttributes": ["title"]}))
         .await;
     snapshot!(code, @"202 Accepted");
-    server.wait_task(response.uid()).await;
+    server.wait_task(response.uid()).await.succeeded();
 
     let documents = DOCUMENTS.clone();
     let (value, code) = index.add_documents(documents, None).await;
 
@@ -6,8 +6,8 @@ use crate::common::Server;
 use crate::json;
 
 #[actix_rt::test]
-async fn get_settings_unexisting_index() {
-    let server = Server::new().await;
+async fn get_version() {
+    let server = Server::new_shared();
     let (response, code) = server.version().await;
     assert_eq!(code, 200);
     let version = response.as_object().unwrap();
@@ -18,7 +18,7 @@ async fn get_settings_unexisting_index() {
 
 #[actix_rt::test]
 async fn test_healthyness() {
-    let server = Server::new().await;
+    let server = Server::new_shared();
 
     let (response, status_code) = server.service.get("/health").await;
     assert_eq!(status_code, 200);
@@ -55,7 +55,7 @@ async fn stats() {
     ]);
 
     let (response, code) = index.add_documents(documents, None).await;
-    assert_eq!(code, 202, "{}", response);
+    assert_eq!(code, 202, "{response}");
     assert_eq!(response["taskUid"], 1);
 
     index.wait_task(response.uid()).await.succeeded();
@@ -78,8 +78,8 @@ async fn stats() {
 
 #[actix_rt::test]
 async fn add_remove_embeddings() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -216,8 +216,8 @@ async fn add_remove_embeddings() {
 
 #[actix_rt::test]
 async fn add_remove_embedded_documents() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (response, code) = index
         .update_settings(json!({
@@ -293,8 +293,8 @@ async fn add_remove_embedded_documents() {
 
 #[actix_rt::test]
 async fn update_embedder_settings() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     // 2 embedded documents for 3 embeddings in total
     // but no embedders are added in the settings yet so we expect 0 embedded documents for 0 embeddings in total
 
@@ -1,8 +1,7 @@
 mod errors;
 mod webhook;
 
-use meili_snap::insta::assert_json_snapshot;
-use meili_snap::snapshot;
+use meili_snap::{json_string, snapshot};
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
@@ -11,14 +10,12 @@ use crate::json;
 
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (response, code) = index.get_task(1).await;
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (response, code) = index.get_task(u32::MAX as u64).await;
 
     let expected_response = json!({
-        "message": "Task `1` not found.",
+        "message": "Task `4294967295` not found.",
         "code": "task_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#task_not_found"
@@ -30,8 +27,8 @@ async fn error_get_unexisting_task_status() {
 
 #[actix_rt::test]
 async fn get_task_status() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (create_task, _status_code) = index.create(None).await;
     let (add_task, _status_code) = index
         .add_documents(
@@ -42,7 +39,7 @@ async fn get_task_status() {
             None,
         )
         .await;
-    index.wait_task(create_task.uid()).await.succeeded();
+    server.wait_task(create_task.uid()).await.succeeded();
     let (_response, code) = index.get_task(add_task.uid()).await;
     assert_eq!(code, 200);
     // TODO check response format, as per #48
@@ -50,10 +47,11 @@ async fn get_task_status() {
 
 #[actix_rt::test]
 async fn list_tasks() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
        .await;
@@ -64,6 +62,7 @@ async fn list_tasks() {
 
 #[actix_rt::test]
 async fn list_tasks_pagination_and_reverse() {
+    // do not use a shared server here, as we want to assert tasks ids and we need them to be stable
    let server = Server::new().await;
     // First of all we want to create a lot of tasks very quickly. The fastest way is to delete a lot of unexisting indexes
     let mut last_task = None;
@@ -71,7 +70,7 @@ async fn list_tasks_pagination_and_reverse() {
         let index = server.index(format!("test-{i}"));
         last_task = Some(index.create(None).await.0.uid());
     }
-    server.wait_task(last_task.unwrap()).await;
+    server.wait_task(last_task.unwrap()).await.succeeded();
 
     let (response, code) = server.tasks_filter("limit=3").await;
     assert_eq!(code, 200);
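// The probe above uses `u32::MAX as u64`, i.e. 4294967295, which matches the expected
// `Task `4294967295` not found.` message: on a shared server, low task uids may belong
// to other tests, so only a never-assigned uid is a safe "unexisting task". Sketch,
// using only helpers that appear in this diff:
// let (response, _code) = index.get_task(u32::MAX as u64).await;
// assert_eq!(response["code"], "task_not_found");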
@@ -103,13 +102,14 @@ async fn list_tasks_pagination_and_reverse() {
 #[actix_rt::test]
 async fn list_tasks_with_star_filters() {
     let server = Server::new().await;
+    // Do not use a unique index here, as we want to test the `indexUids=*` filter.
     let index = server.index("test");
     let (task, _code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
-    let (response, code) = index.service.get("/tasks?indexUids=test").await;
+    let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await;
     assert_eq!(code, 200);
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
@@ -127,93 +127,102 @@ async fn list_tasks_with_star_filters() {
 
     let (response, code) =
         index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index
         .service
-        .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test")
+        .get(format!(
+            "/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}",
+            index.uid
+        ))
         .await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 
     let (response, code) = index
         .service
         .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*")
         .await;
-    assert_eq!(code, 200, "{:?}", response);
+    assert_eq!(code, 200, "{response:?}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_status_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_type_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 1);
 
     let (response, code) =
         index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
 #[actix_rt::test]
 async fn list_tasks_invalid_canceled_by_filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    index
+    server.wait_task(task.uid()).await.succeeded();
+
+    let (task, _code) = index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
+    server.wait_task(task.uid()).await.succeeded();
 
-    let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
-    assert_eq!(code, 200, "{}", response);
+    let (response, code) =
+        index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 }
 
 #[actix_rt::test]
 async fn list_tasks_status_and_type_filtered() {
+    // Do not use a shared server because we want to assert stuff against the global list of tasks
     let server = Server::new().await;
     let index = server.index("test");
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     index
         .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
         .await;
 
     let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 0);
 
     let (response, code) = index
@@ -223,12 +232,12 @@ async fn list_tasks_status_and_type_filtered() {
         &[],
     )
     .await;
-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["results"].as_array().unwrap().len(), 2);
 }
 
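// The `list_tasks*` tests above keep a dedicated `Server::new().await` on purpose: they
// assert against the global task list (result counts, stable uids), which a shared
// server would pollute with tasks from unrelated tests running concurrently. A hedged
// sketch of the stable setup, using only helpers that appear in this diff (the test
// name is hypothetical):
// #[actix_rt::test]
// async fn global_task_list_is_deterministic() {
//     let server = Server::new().await; // dedicated instance: no foreign tasks
//     let index = server.index("test");
//     let (task, _status_code) = index.create(None).await;
//     server.wait_task(task.uid()).await.succeeded();
//     let (response, code) = server.tasks_filter("limit=3").await;
//     assert_eq!(code, 200);
//     // only this test's task exists, so the count is deterministic
//     assert_eq!(response["results"].as_array().unwrap().len(), 1);
// }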
 macro_rules! assert_valid_summarized_task {
-    ($response:expr, $task_type:literal, $index:literal) => {{
+    ($response:expr, $task_type:literal, $index:tt) => {{
         assert_eq!($response.as_object().unwrap().len(), 5);
         assert!($response["taskUid"].as_u64().is_some());
         assert_eq!($response["indexUid"], $index);
@@ -242,49 +251,49 @@ macro_rules! assert_valid_summarized_task {
 
 #[actix_web::test]
 async fn test_summarized_task_view() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let index_uid = index.uid.clone();
 
     let (response, _) = index.create(None).await;
-    assert_valid_summarized_task!(response, "indexCreation", "test");
+    assert_valid_summarized_task!(response, "indexCreation", index_uid);
 
     let (response, _) = index.update(None).await;
-    assert_valid_summarized_task!(response, "indexUpdate", "test");
+    assert_valid_summarized_task!(response, "indexUpdate", index_uid);
 
     let (response, _) = index.update_settings(json!({})).await;
-    assert_valid_summarized_task!(response, "settingsUpdate", "test");
+    assert_valid_summarized_task!(response, "settingsUpdate", index_uid);
 
     let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
 
     let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
-    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
+    assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
 
     let (response, _) = index.delete_document(1).await;
-    assert_valid_summarized_task!(response, "documentDeletion", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", index_uid);
 
     let (response, _) = index.clear_all_documents().await;
-    assert_valid_summarized_task!(response, "documentDeletion", "test");
+    assert_valid_summarized_task!(response, "documentDeletion", index_uid);
 
     let (response, _) = index.delete().await;
-    assert_valid_summarized_task!(response, "indexDeletion", "test");
+    assert_valid_summarized_task!(response, "indexDeletion", index_uid);
 }
 
 #[actix_web::test]
 async fn test_summarized_document_addition_or_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.get_task(0).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    server.wait_task(task.uid()).await.succeeded();
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
@@ -302,15 +311,14 @@ async fn test_summarized_document_addition_or_update() {
 
     let (task, _status_code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    index.wait_task(task.uid()).await.succeeded();
-    let (task, _) = index.get_task(1).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    server.wait_task(task.uid()).await.succeeded();
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
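// The matcher change above (`$index:literal` -> `$index:tt`) is what lets call sites
// pass either a string literal ("test") or a runtime value such as `index_uid`; a
// `literal` fragment would reject the identifier at expansion time. Minimal sketch
// (RESPONSE is a hypothetical value standing in for the task response):
// macro_rules! demo {
//     ($index:tt) => { assert_eq!(RESPONSE["indexUid"], $index); };
// }
// demo!("test");     // a literal still matches
// demo!(index_uid);  // an identifier is a single token tree, so this now compiles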
@@ -329,18 +337,22 @@ async fn test_summarized_document_addition_or_update() {
 
 #[actix_web::test]
 async fn test_summarized_delete_documents_by_batch() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
-    index.wait_task(task.uid()).await.failed();
-    let (task, _) = index.get_task(0).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let non_existing_task_id1 = u32::MAX as u64;
+    let non_existing_task_id2 = non_existing_task_id1 - 1;
+    let non_existing_task_id3 = non_existing_task_id1 - 2;
+    let (task, _status_code) = index
+        .delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3])
+        .await;
+    server.wait_task(task.uid()).await.failed();
+    let (task, _) = index.get_task(task.uid()).await;
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -350,7 +362,7 @@ async fn test_summarized_delete_documents_by_batch() {
       "originalFilter": null
     },
     "error": {
-      "message": "Index `test` not found.",
+      "message": "Index `[uuid]` not found.",
       "code": "index_not_found",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -364,15 +376,14 @@ async fn test_summarized_delete_documents_by_batch() {
 
     index.create(None).await;
     let (del_task, _status_code) = index.delete_batch(vec![42]).await;
-    index.wait_task(del_task.uid()).await.succeeded();
+    server.wait_task(del_task.uid()).await.succeeded();
     let (task, _) = index.get_task(del_task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -392,20 +403,19 @@ async fn test_summarized_delete_documents_by_batch() {
 
 #[actix_web::test]
 async fn test_summarized_delete_documents_by_filter() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
 
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -415,7 +425,7 @@ async fn test_summarized_delete_documents_by_filter() {
       "originalFilter": "\"doggo = bernese\""
     },
     "error": {
-      "message": "Index `test` not found.",
+      "message": "Index `[uuid]` not found.",
       "code": "index_not_found",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -430,15 +440,14 @@ async fn test_summarized_delete_documents_by_filter() {
     index.create(None).await;
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -448,7 +457,7 @@ async fn test_summarized_delete_documents_by_filter() {
       "originalFilter": "\"doggo = bernese\""
     },
     "error": {
-      "message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
+      "message": "Index `[uuid]`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
       "code": "invalid_document_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_document_filter"
@@ -463,15 +472,14 @@ async fn test_summarized_delete_documents_by_filter() {
     index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
     let (task, _status_code) =
         index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 4,
-      "batchUid": 4,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -491,18 +499,17 @@ async fn test_summarized_delete_documents_by_filter() {
 
 #[actix_web::test]
 async fn test_summarized_delete_document_by_id() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.delete_document(1).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -512,7 +519,7 @@ async fn test_summarized_delete_document_by_id() {
       "originalFilter": null
     },
     "error": {
-      "message": "Index `test` not found.",
+      "message": "Index `[uuid]` not found.",
       "code": "index_not_found",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -526,15 +533,14 @@ async fn test_summarized_delete_document_by_id() {
 
     index.create(None).await;
     let (task, _status_code) = index.delete_document(42).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 2,
-      "batchUid": 2,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentDeletion",
       "canceledBy": null,
@@ -554,12 +560,12 @@ async fn test_summarized_delete_document_by_id() {
 
 #[actix_web::test]
 async fn test_summarized_settings_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // here we should find my payload even in the failed task.
     let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
-    meili_snap::snapshot!(code, @"400 Bad Request");
-    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
    {
       "message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
       "code": "invalid_settings_ranking_rules",
@@ -569,15 +575,14 @@ async fn test_summarized_settings_update() {
     "###);
 
     let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "settingsUpdate",
       "canceledBy": null,
@@ -605,18 +610,17 @@ async fn test_summarized_settings_update() {
 
 #[actix_web::test]
 async fn test_summarized_index_creation() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, _status_code) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    server.wait_task(task.uid()).await.succeeded();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexCreation",
       "canceledBy": null,
@@ -632,15 +636,14 @@ async fn test_summarized_index_creation() {
     "###);
 
     let (task, _status_code) = index.create(Some("doggos")).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 1,
-      "batchUid": 1,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexCreation",
       "canceledBy": null,
@@ -648,7 +651,7 @@ async fn test_summarized_index_creation() {
       "primaryKey": "doggos"
     },
     "error": {
-      "message": "Index `test` already exists.",
+      "message": "Index `[uuid]` already exists.",
       "code": "index_already_exists",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#index_already_exists"
@@ -663,16 +666,16 @@ async fn test_summarized_index_creation() {
 
 #[actix_web::test]
 async fn test_summarized_index_deletion() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (ret, _code) = index.delete().await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
     snapshot!(task,
        @r###"
    {
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexDeletion",
       "canceledBy": null,
@@ -680,7 +683,7 @@ async fn test_summarized_index_deletion() {
       "deletedDocuments": 0
     },
     "error": {
-      "message": "Index `test` not found.",
+      "message": "Index `[uuid]` not found.",
       "code": "index_not_found",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -697,13 +700,13 @@ async fn test_summarized_index_deletion() {
     // both tasks may get autobatched and the deleted documents count will be wrong.
     let (ret, _code) =
         index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
     snapshot!(task,
        @r###"
    {
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "documentAdditionOrUpdate",
       "canceledBy": null,
@@ -720,13 +723,13 @@ async fn test_summarized_index_deletion() {
     "###);
 
     let (ret, _code) = index.delete().await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
     snapshot!(task,
        @r###"
    {
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "succeeded",
       "type": "indexDeletion",
       "canceledBy": null,
@@ -743,13 +746,13 @@ async fn test_summarized_index_deletion() {
 
     // What happens when you delete an index that doesn't exists.
     let (ret, _code) = index.delete().await;
-    let task = index.wait_task(ret.uid()).await;
+    let task = server.wait_task(ret.uid()).await;
     snapshot!(task,
        @r###"
    {
      "uid": "[uid]",
      "batchUid": "[batch_uid]",
-      "indexUid": "test",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexDeletion",
       "canceledBy": null,
@@ -757,7 +760,7 @@ async fn test_summarized_index_deletion() {
       "deletedDocuments": 0
     },
     "error": {
-      "message": "Index `test` not found.",
+      "message": "Index `[uuid]` not found.",
       "code": "index_not_found",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -772,19 +775,18 @@ async fn test_summarized_index_deletion() {
 
 #[actix_web::test]
 async fn test_summarized_index_update() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     // If the index doesn't exist yet, we should get errors with or without the primary key.
     let (task, _status_code) = index.update(None).await;
-    index.wait_task(task.uid()).await.failed();
+    server.wait_task(task.uid()).await.failed();
     let (task, _) = index.get_task(task.uid()).await;
-    assert_json_snapshot!(task,
-        { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
+    snapshot!(task,
        @r###"
    {
-      "uid": 0,
-      "batchUid": 0,
-      "indexUid": "test",
+      "uid": "[uid]",
+      "batchUid": "[batch_uid]",
+      "indexUid": "[uuid]",
       "status": "failed",
       "type": "indexUpdate",
       "canceledBy": null,
@@ -792,7 +794,7 @@ async fn test_summarized_index_update() {
|
||||
"primaryKey": null
|
||||
},
|
||||
"error": {
|
||||
"message": "Index `test` not found.",
|
||||
"message": "Index `[uuid]` not found.",
|
||||
"code": "index_not_found",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#index_not_found"
|
||||
@@ -805,15 +807,14 @@ async fn test_summarized_index_update() {
|
||||
"###);
|
||||
|
||||
let (task, _status_code) = index.update(Some("bones")).await;
|
||||
index.wait_task(task.uid()).await.failed();
|
||||
server.wait_task(task.uid()).await.failed();
|
||||
let (task, _) = index.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": 1,
|
||||
"batchUid": 1,
|
||||
"indexUid": "test",
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "failed",
|
||||
"type": "indexUpdate",
|
||||
"canceledBy": null,
|
||||
@@ -821,7 +822,7 @@ async fn test_summarized_index_update() {
|
||||
"primaryKey": "bones"
|
||||
},
|
||||
"error": {
|
||||
"message": "Index `test` not found.",
|
||||
"message": "Index `[uuid]` not found.",
|
||||
"code": "index_not_found",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#index_not_found"
|
||||
@@ -837,15 +838,14 @@ async fn test_summarized_index_update() {
|
||||
index.create(None).await;
|
||||
|
||||
let (task, _status_code) = index.update(None).await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _) = index.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": 3,
|
||||
"batchUid": 3,
|
||||
"indexUid": "test",
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "succeeded",
|
||||
"type": "indexUpdate",
|
||||
"canceledBy": null,
|
||||
@@ -861,15 +861,14 @@ async fn test_summarized_index_update() {
|
||||
"###);
|
||||
|
||||
let (task, _status_code) = index.update(Some("bones")).await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _) = index.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": 4,
|
||||
"batchUid": 4,
|
||||
"indexUid": "test",
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "succeeded",
|
||||
"type": "indexUpdate",
|
||||
"canceledBy": null,
|
||||
@@ -887,7 +886,7 @@ async fn test_summarized_index_update() {
|
||||
|
||||
#[actix_web::test]
|
||||
async fn test_summarized_index_swap() {
|
||||
let server = Server::new().await;
|
||||
let server = Server::new_shared();
|
||||
let (task, _status_code) = server
|
||||
.index_swap(json!([
|
||||
{ "indexes": ["doggos", "cattos"] }
|
||||
@@ -895,12 +894,11 @@ async fn test_summarized_index_swap() {
|
||||
.await;
|
||||
server.wait_task(task.uid()).await.failed();
|
||||
let (task, _) = server.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": 0,
|
||||
"batchUid": 0,
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": null,
|
||||
"status": "failed",
|
||||
"type": "indexSwap",
|
||||
@@ -928,23 +926,25 @@ async fn test_summarized_index_swap() {
|
||||
}
|
||||
"###);
|
||||
|
||||
let (task, _code) = server.index("doggos").create(None).await;
|
||||
let doggos_index = server.unique_index();
|
||||
let (task, _code) = doggos_index.create(None).await;
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _code) = server.index("cattos").create(None).await;
|
||||
let cattos_index = server.unique_index();
|
||||
let (task, _code) = cattos_index.create(None).await;
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _code) = server
|
||||
.index_swap(json!([
|
||||
{ "indexes": ["doggos", "cattos"] }
|
||||
{ "indexes": [doggos_index.uid, cattos_index.uid] }
|
||||
]))
|
||||
.await;
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _) = server.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(json_string!(task,
|
||||
{ ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
|
||||
@r###"
|
||||
{
|
||||
"uid": 3,
|
||||
"batchUid": 3,
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": null,
|
||||
"status": "succeeded",
|
||||
"type": "indexSwap",
|
||||
@@ -970,20 +970,21 @@ async fn test_summarized_index_swap() {
|
||||
|
||||
#[actix_web::test]
|
||||
async fn test_summarized_task_cancelation() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggos");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
// to avoid being flaky we're only going to cancel an already finished task :(
|
||||
let (task, _status_code) = index.create(None).await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _status_code) = server.cancel_tasks("uids=0").await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
let task_uid = task.uid();
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await;
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _) = index.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(json_string!(task,
|
||||
{ ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
|
||||
@r###"
|
||||
{
|
||||
"uid": 1,
|
||||
"batchUid": 1,
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": null,
|
||||
"status": "succeeded",
|
||||
"type": "taskCancelation",
|
||||
@@ -991,7 +992,7 @@ async fn test_summarized_task_cancelation() {
|
||||
"details": {
|
||||
"matchedTasks": 1,
|
||||
"canceledTasks": 0,
|
||||
"originalFilter": "?uids=0"
|
||||
"originalFilter": "[of]"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
@@ -1004,20 +1005,19 @@ async fn test_summarized_task_cancelation() {
|
||||
|
||||
#[actix_web::test]
|
||||
async fn test_summarized_task_deletion() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("doggos");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
// to avoid being flaky we're only going to delete an already finished task :(
|
||||
let (task, _status_code) = index.create(None).await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _status_code) = server.delete_tasks("uids=0").await;
|
||||
index.wait_task(task.uid()).await.succeeded();
|
||||
server.wait_task(task.uid()).await.succeeded();
|
||||
let (task, _) = index.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": 1,
|
||||
"batchUid": 1,
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": null,
|
||||
"status": "succeeded",
|
||||
"type": "taskDeletion",
|
||||
@@ -1038,22 +1038,22 @@ async fn test_summarized_task_deletion() {
|
||||
|
||||
#[actix_web::test]
|
||||
async fn test_summarized_dump_creation() {
|
||||
// Do not use a shared server because it takes too long to create a dump
|
||||
let server = Server::new().await;
|
||||
let (task, _status_code) = server.create_dump().await;
|
||||
server.wait_task(task.uid()).await;
|
||||
let (task, _) = server.get_task(task.uid()).await;
|
||||
assert_json_snapshot!(task,
|
||||
{ ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
|
||||
snapshot!(task,
|
||||
@r###"
|
||||
{
|
||||
"uid": 0,
|
||||
"batchUid": 0,
|
||||
"uid": "[uid]",
|
||||
"batchUid": "[batch_uid]",
|
||||
"indexUid": null,
|
||||
"status": "succeeded",
|
||||
"type": "dumpCreation",
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"dumpUid": "[dumpUid]"
|
||||
"dumpUid": "[dump_uid]"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
||||
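Taken together, the hunks above migrate the task tests from one `Server::new()` per test to a process-wide shared server with per-test unique indexes, which is why the snapshots switch from literal uids and index names to redacted `[uid]`/`[batch_uid]`/`[uuid]` placeholders. A minimal sketch of the resulting pattern, assuming the `new_shared`/`unique_index` helpers keep the signatures shown in the diff:

#[actix_web::test]
async fn example_migrated_test() {
    // The server is shared between tests, so task uids are not deterministic
    // and waiting must go through the server rather than the index.
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task, _status_code) = index.create(None).await;
    server.wait_task(task.uid()).await.succeeded();
}

Dump creation is the exception: as the comment in the last test notes, it keeps a dedicated `Server::new()` because creating a dump is too slow to run on a shared server.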
@@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents;

#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
-let server = Server::new().await;
-let index = server.index("doggo");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() {

#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
-let server = Server::new().await;
-let index = server.index("doggo");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() {

#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
-let server = Server::new().await;
-let index = server.index("doggo");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() {

#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
-let server = Server::new().await;
-let index = server.index("doggo");
+let server = Server::new_shared();
+let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() {
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
-snapshot!(ret, @r#"
+snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
-"indexUid": "doggo",
+"indexUid": "[uuid]",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
@@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() {
}
},
"error": {
-"message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
+"message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"
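For reference, the failure captured above is triggered by enabling binary quantization and then trying to turn it off; a hedged sketch of the offending settings call (the embedder name and field are taken from the error message above):

let (response, _code) = index
    .update_settings(json!({
        "embedders": {
            "manual": {
                // rejected: binary quantization is lossy and cannot be reverted
                "binaryQuantized": false
            }
        }
    }))
    .await;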
@@ -50,7 +50,7 @@ impl AttributePatterns {
///
/// * `pattern` - The pattern to match against.
/// * `str` - The string to match against the pattern.
-fn match_pattern(pattern: &str, str: &str) -> PatternMatch {
+pub fn match_pattern(pattern: &str, str: &str) -> PatternMatch {
// If the pattern is a wildcard, return Match
if pattern == "*" {
return PatternMatch::Match;
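`match_pattern` is promoted to `pub` here so the search code can reuse the attribute-pattern rules. As a rough, self-contained illustration of those rules (not milli's exact implementation; the `PatternMatch::Parent` case and combined wildcards are left out):

fn matches(pattern: &str, s: &str) -> bool {
    if pattern == "*" {
        true // a lone wildcard matches everything
    } else if let Some(prefix) = pattern.strip_suffix('*') {
        s.starts_with(prefix) // "title*" matches "title_fr"
    } else if let Some(suffix) = pattern.strip_prefix('*') {
        s.ends_with(suffix) // "*_id" matches "user_id"
    } else {
        pattern == s // otherwise an exact match is required
    }
}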
crates/milli/src/bin/embedder_settings.rs (new file, 423 lines)
@@ -0,0 +1,423 @@
use std::io::Write;

use milli::vector::settings::{
    EmbedderSource, EmbeddingSettings, FieldStatus, MetaEmbeddingSetting, NestingContext,
    ReindexOutcome,
};

pub trait Formatter {
    fn begin_document(&mut self);
    fn end_document(&mut self);

    fn begin_header(&mut self);
    fn put_source_header(&mut self, source: EmbedderSource);
    fn end_header(&mut self);

    fn begin_setting(
        &mut self,
        setting: MetaEmbeddingSetting,
        description: &'static str,
        kind: &'static str,
        reindex_outcome: ReindexOutcome,
        default_value: &'static str,
    );
    fn end_setting(&mut self, setting: MetaEmbeddingSetting);

    fn put_setting_status(
        &mut self,
        source: EmbedderSource,
        field_status_by_nesting_context: FieldStatusByNestingContext,
    );
}

pub struct GitHubMdFormatter<W> {
    w: W,
}

impl<W: Write> GitHubMdFormatter<W> {
    pub fn new(w: W) -> Self {
        Self { w }
    }
}

impl<W: Write> Formatter for GitHubMdFormatter<W> {
    fn begin_document(&mut self) {
        let s = r#"

<table>
<tbody>
"#;
        write!(self.w, "{s}").unwrap()
    }

    fn end_document(&mut self) {
        write!(
            self.w,
            r#"
</tbody>
</table>
"#
        )
        .unwrap()
    }

    fn begin_header(&mut self) {
        write!(
            self.w,
            r#"
<thead>
<tr>
<th>Setting</th>
<th>Description</th>
<th>Type</th>
<th>Default Value</th>
<th>Regenerate on Change</th>
<th colspan="6">Availability for source</th>
</tr>
<tr>
<th colspan="5"></th>
"#
        )
        .unwrap()
    }

    fn put_source_header(&mut self, source: EmbedderSource) {
        write!(
            self.w,
            r#"
<th>

{source}

</th>
"#
        )
        .unwrap()
    }

    fn end_header(&mut self) {
        write!(
            self.w,
            r#"
</tr>
</thead>
"#
        )
        .unwrap()
    }

    fn begin_setting(
        &mut self,
        setting: MetaEmbeddingSetting,
        description: &'static str,
        kind: &'static str,
        reindex_outcome: ReindexOutcome,
        default_value: &'static str,
    ) {
        let name = setting.name();
        let reindex_outcome = match reindex_outcome {
            ReindexOutcome::AlwaysReindex => "🏗️ Always",
            ReindexOutcome::NeverReindex => "🌱 Never",
            ReindexOutcome::ReindexSometimes(sometimes) => sometimes,
        };
        write!(
            self.w,
            r#"
<tr>
<td>

`{name}`

</td>
<td>

{description}

</td>
<td>

{kind}

</td>
<td>

{default_value}

</td>
<td>

{reindex_outcome}

</td>
"#
        )
        .unwrap()
    }

    fn end_setting(&mut self, _setting: MetaEmbeddingSetting) {
        write!(
            self.w,
            r#"

</tr>
"#
        )
        .unwrap()
    }

    fn put_setting_status(
        &mut self,
        _source: EmbedderSource,
        field_status_by_nesting_context: FieldStatusByNestingContext,
    ) {
        let field_status = match field_status_by_nesting_context {
            FieldStatusByNestingContext::Invariant(field_status) => {
                format_field_status(field_status).to_string()
            }
            FieldStatusByNestingContext::Variant(variant_field_status_by_nesting_context) => {
                format!(
                    r#"
- Usually, {}
- When used in `searchEmbedder` in a `composite` embedder, {}
- When used in `indexingEmbedder` in a `composite` embedder, {}
"#,
                    format_field_status(variant_field_status_by_nesting_context.not_nested),
                    format_field_status(variant_field_status_by_nesting_context.search),
                    format_field_status(variant_field_status_by_nesting_context.index)
                )
            }
        };
        write!(
            self.w,
            r#"
<td>

{field_status}

</td>
"#
        )
        .unwrap();
    }
}

fn format_field_status(field_status: FieldStatus) -> &'static str {
    match field_status {
        FieldStatus::Mandatory => "🔐 **Mandatory**",
        FieldStatus::Allowed => "✅ Allowed",
        FieldStatus::Disallowed => "🚫 Disallowed",
    }
}

pub struct GitHubMdAvailabilityFormatter<W>(pub GitHubMdFormatter<W>);

impl<W: Write> Formatter for GitHubMdAvailabilityFormatter<W> {
    fn begin_document(&mut self) {
        write!(self.0.w, "## Availability of the settings depending on the selected source\n\n")
            .unwrap();
        self.0.begin_document();
    }

    fn end_document(&mut self) {
        self.0.end_document();
    }

    fn begin_header(&mut self) {
        write!(
            self.0.w,
            r#"
<thead>
<tr>
<th>Setting</th>
"#
        )
        .unwrap()
    }

    fn put_source_header(&mut self, source: EmbedderSource) {
        self.0.put_source_header(source);
    }

    fn end_header(&mut self) {
        self.0.end_header();
    }

    fn begin_setting(
        &mut self,
        setting: MetaEmbeddingSetting,
        _description: &'static str,
        _kind: &'static str,
        _reindex_outcome: ReindexOutcome,
        _default_value: &'static str,
    ) {
        if setting == MetaEmbeddingSetting::Source {
            return;
        }
        let name = setting.name();
        write!(
            self.0.w,
            r#"
<tr>
<td>

`{name}`

</td>
"#
        )
        .unwrap()
    }

    fn end_setting(&mut self, setting: MetaEmbeddingSetting) {
        if setting == MetaEmbeddingSetting::Source {
            return;
        }
        self.0.end_setting(setting);
    }

    fn put_setting_status(
        &mut self,
        source: EmbedderSource,
        field_status_by_nesting_context: FieldStatusByNestingContext,
    ) {
        self.0.put_setting_status(source, field_status_by_nesting_context);
    }
}

pub struct GitHubMdBasicFormatter<W>(pub GitHubMdFormatter<W>);

impl<W: Write> Formatter for GitHubMdBasicFormatter<W> {
    fn begin_document(&mut self) {
        write!(self.0.w, "## List of the embedder settings\n\n").unwrap();
        self.0.begin_document();
    }

    fn end_document(&mut self) {
        self.0.end_document();
    }

    fn begin_header(&mut self) {
        write!(
            self.0.w,
            r#"
<thead>
<tr>
<th>Setting</th>
<th>Description</th>
<th>Type</th>
<th>Default Value</th>
<th>Regenerate on Change</th>
"#
        )
        .unwrap()
    }

    fn put_source_header(&mut self, _source: EmbedderSource) {}

    fn end_header(&mut self) {
        self.0.end_header();
    }

    fn begin_setting(
        &mut self,
        setting: MetaEmbeddingSetting,
        description: &'static str,
        kind: &'static str,
        reindex_outcome: ReindexOutcome,
        default_value: &'static str,
    ) {
        self.0.begin_setting(setting, description, kind, reindex_outcome, default_value);
    }

    fn end_setting(&mut self, setting: MetaEmbeddingSetting) {
        self.0.end_setting(setting);
    }

    fn put_setting_status(
        &mut self,
        _source: EmbedderSource,
        _field_status_by_nesting_context: FieldStatusByNestingContext,
    ) {
    }
}

pub enum FieldStatusByNestingContext {
    Invariant(FieldStatus),
    Variant(VariantFieldStatusByNestingContext),
}

pub struct VariantFieldStatusByNestingContext {
    not_nested: FieldStatus,
    search: FieldStatus,
    index: FieldStatus,
}

fn format_settings(mut fmt: impl Formatter) {
    #![allow(unused_labels)] // the labels are used as documentation
    fmt.begin_document();
    fmt.begin_header();
    for source in enum_iterator::all::<EmbedderSource>() {
        fmt.put_source_header(source);
    }
    fmt.end_header();
    'setting: for setting in enum_iterator::all::<MetaEmbeddingSetting>() {
        let description = setting.description();
        let kind = setting.kind();
        let reindex_outcome = setting.reindex_outcome();
        let default_value = setting.default_value();
        fmt.begin_setting(setting, description, kind, reindex_outcome, default_value);

        'source: for source in enum_iterator::all::<EmbedderSource>() {
            if setting == MetaEmbeddingSetting::Source {
                break 'source;
            }
            let mut field_status = VariantFieldStatusByNestingContext {
                not_nested: FieldStatus::Disallowed,
                search: FieldStatus::Disallowed,
                index: FieldStatus::Disallowed,
            };
            'nesting: for nesting_context in enum_iterator::all::<NestingContext>() {
                let status = EmbeddingSettings::field_status(source, setting, nesting_context);

                match nesting_context {
                    NestingContext::NotNested => {
                        field_status.not_nested = status;
                    }
                    NestingContext::Search => {
                        field_status.search = status;
                    }
                    NestingContext::Indexing => {
                        field_status.index = status;
                    }
                }
            }
            let field_status_by_nesting_context = if field_status.index == field_status.search
                && field_status.search == field_status.not_nested
            {
                FieldStatusByNestingContext::Invariant(field_status.not_nested)
            } else {
                FieldStatusByNestingContext::Variant(field_status)
            };
            fmt.put_setting_status(source, field_status_by_nesting_context);
        }
        fmt.end_setting(setting);
    }
    fmt.end_document();
}

fn main() {
    let mut std_out = std::io::stdout().lock();

    write!(
        &mut std_out,
        "The tables below have been generated by calling `cargo run --bin embedder_settings`\n\n"
    )
    .unwrap();

    let formatter = GitHubMdFormatter::new(&mut std_out);
    let formatter = GitHubMdBasicFormatter(formatter);
    format_settings(formatter);

    write!(&mut std_out, "\n\n").unwrap();

    let formatter = GitHubMdFormatter::new(&mut std_out);
    let formatter = GitHubMdAvailabilityFormatter(formatter);
    format_settings(formatter);
}
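Because `GitHubMdFormatter` is generic over any `std::io::Write`, the tables do not have to go to stdout. A hedged sketch of capturing the basic table into a `String` instead, assuming the items above are in scope:

fn render_basic_table() -> String {
    // Vec<u8> implements Write, so the formatter can target memory.
    let mut buf = Vec::new();
    let fmt = GitHubMdBasicFormatter(GitHubMdFormatter::new(&mut buf));
    format_settings(fmt);
    String::from_utf8(buf).expect("the formatter only writes valid UTF-8")
}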
@@ -32,13 +32,13 @@ impl ExternalDocumentsIds {
&self,
rtxn: &RoTxn<'_>,
external_id: A,
-) -> heed::Result<Option<DocumentId>> {
+) -> heed::Result<Option<u32>> {
self.0.get(rtxn, external_id.as_ref())
}

/// A helper function to debug this type, returns a `HashMap` of both,
/// soft and hard fst maps, combined.
-pub fn to_hash_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<String, DocumentId>> {
+pub fn to_hash_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<String, u32>> {
let mut map = HashMap::default();
for result in self.0.iter(rtxn)? {
let (external, internal) = result?;

@@ -7,7 +7,6 @@ use crate::FieldId;
mod global;
pub mod metadata;
pub use global::GlobalFieldsIdsMap;
-pub use metadata::{FieldIdMapWithMetadata, MetadataBuilder};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldsIdsMap {

@@ -23,7 +23,6 @@ use crate::heed_codec::facet::{
use crate::heed_codec::version::VersionCodec;
use crate::heed_codec::{BEU16StrCodec, FstSetCodec, StrBEU16Codec, StrRefCodec};
use crate::order_by_map::OrderByMap;
-use crate::prompt::PromptData;
use crate::proximity::ProximityPrecision;
use crate::vector::{ArroyStats, ArroyWrapper, Embedding, EmbeddingConfig};
use crate::{
@@ -80,7 +79,6 @@ pub mod main_key {
pub const PREFIX_SEARCH: &str = "prefix_search";
pub const DOCUMENTS_STATS: &str = "documents_stats";
pub const DISABLED_TYPOS_TERMS: &str = "disabled_typos_terms";
-pub const CHAT: &str = "chat";
}

pub mod db_name {
@@ -1693,25 +1691,6 @@ impl Index {
self.main.remap_key_type::<Str>().delete(txn, main_key::FACET_SEARCH)
}

-pub fn chat_config(&self, txn: &RoTxn<'_>) -> heed::Result<ChatConfig> {
-self.main
-.remap_types::<Str, SerdeBincode<_>>()
-.get(txn, main_key::CHAT)
-.map(|o| o.unwrap_or_default())
-}
-
-pub(crate) fn put_chat_config(
-&self,
-txn: &mut RwTxn<'_>,
-val: &ChatConfig,
-) -> heed::Result<()> {
-self.main.remap_types::<Str, SerdeBincode<_>>().put(txn, main_key::CHAT, &val)
-}
-
-pub(crate) fn delete_chat_config(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
-self.main.remap_key_type::<Str>().delete(txn, main_key::CHAT)
-}
-
pub fn localized_attributes_rules(
&self,
rtxn: &RoTxn<'_>,
@@ -1938,13 +1917,6 @@ pub struct IndexEmbeddingConfig {
pub user_provided: RoaringBitmap,
}

-#[derive(Debug, Default, Deserialize, Serialize)]
-pub struct ChatConfig {
-pub description: String,
-/// Contains the document template and max template length.
-pub prompt: PromptData,
-}
-
#[derive(Debug, Deserialize, Serialize)]
pub struct PrefixSettings {
pub prefix_count_threshold: usize,

@@ -52,19 +52,18 @@ pub use search::new::{
};
use serde_json::Value;
pub use thread_pool_no_abort::{PanicCatched, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
-pub use {arroy, charabia as tokenizer, heed, rhai};
+pub use {charabia as tokenizer, heed, rhai};

pub use self::asc_desc::{AscDesc, AscDescError, Member, SortError};
-pub use self::attribute_patterns::{AttributePatterns, PatternMatch};
+pub use self::attribute_patterns::AttributePatterns;
+pub use self::attribute_patterns::PatternMatch;
pub use self::criterion::{default_criteria, Criterion, CriterionError};
pub use self::error::{
Error, FieldIdMapMissingEntry, InternalError, SerializationError, UserError,
};
pub use self::external_documents_ids::ExternalDocumentsIds;
pub use self::fieldids_weights_map::FieldidsWeightsMap;
-pub use self::fields_ids_map::{
-FieldIdMapWithMetadata, FieldsIdsMap, GlobalFieldsIdsMap, MetadataBuilder,
-};
+pub use self::fields_ids_map::{FieldsIdsMap, GlobalFieldsIdsMap};
pub use self::filterable_attributes_rules::{
FilterFeatures, FilterableAttributesFeatures, FilterableAttributesPatterns,
FilterableAttributesRule,
@@ -85,6 +84,8 @@ pub use self::search::{
};
pub use self::update::ChannelCongestion;

+pub use arroy;
+
pub type Result<T> = std::result::Result<T, error::Error>;

pub type Attribute = u32;
@@ -65,7 +65,7 @@ fn default_template() -> liquid::Template {
new_template(default_template_text()).unwrap()
}

-fn default_template_text() -> &'static str {
+pub(crate) fn default_template_text() -> &'static str {
"{% for field in fields %}\
{% if field.is_searchable and field.value != nil %}\
{{ field.name }}: {{ field.value }}\n\
@@ -105,10 +105,10 @@ impl Prompt {
max_bytes,
};

-// // render template with special object that's OK with `doc.*` and `fields.*`
-// this.template
-//     .render(&template_checker::TemplateChecker)
-//     .map_err(NewPromptError::invalid_fields_in_template)?;
+// render template with special object that's OK with `doc.*` and `fields.*`
+this.template
+    .render(&template_checker::TemplateChecker)
+    .map_err(NewPromptError::invalid_fields_in_template)?;

Ok(this)
}
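For intuition, the default template above emits one `name: value` line per searchable, non-nil field. Given a hypothetical document `{ "title": "Shazam!", "release_year": 2019 }` with both fields searchable, it would render roughly:

title: Shazam!
release_year: 2019

Re-enabling the `TemplateChecker` render in the hunk above means a template referencing unknown fields fails at prompt construction with `NewPromptError::invalid_fields_in_template` rather than being accepted silently.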
@@ -1,11 +1,13 @@
use std::cmp::Ordering;

+use heed::RoTxn;
use itertools::Itertools;
use roaring::RoaringBitmap;

use crate::score_details::{ScoreDetails, ScoreValue, ScoringStrategy};
+use crate::search::new::{distinct_fid, distinct_single_docid};
use crate::search::SemanticSearch;
-use crate::{MatchingWords, Result, Search, SearchResult};
+use crate::{Index, MatchingWords, Result, Search, SearchResult};

struct ScoreWithRatioResult {
matching_words: MatchingWords,
@@ -91,7 +93,10 @@ impl ScoreWithRatioResult {
keyword_results: Self,
from: usize,
length: usize,
-) -> (SearchResult, u32) {
+distinct: Option<&str>,
+index: &Index,
+rtxn: &RoTxn<'_>,
+) -> Result<(SearchResult, u32)> {
#[derive(Clone, Copy)]
enum ResultSource {
Semantic,
@@ -106,8 +111,9 @@ impl ScoreWithRatioResult {
vector_results.document_scores.len() + keyword_results.document_scores.len(),
);

-let mut documents_seen = RoaringBitmap::new();
-for ((docid, (main_score, _sub_score)), source) in vector_results
+let distinct_fid = distinct_fid(distinct, index, rtxn)?;
+let mut excluded_documents = RoaringBitmap::new();
+for res in vector_results
.document_scores
.into_iter()
.zip(std::iter::repeat(ResultSource::Semantic))
@@ -121,13 +127,33 @@ impl ScoreWithRatioResult {
compare_scores(left, right).is_ge()
},
)
-// remove documents we already saw
-.filter(|((docid, _), _)| documents_seen.insert(*docid))
+// remove documents we already saw and apply distinct rule
+.filter_map(|item @ ((docid, _), _)| {
+if !excluded_documents.insert(docid) {
+// the document was already added, or is indistinct from an already-added document.
+return None;
+}
+
+if let Some(distinct_fid) = distinct_fid {
+if let Err(error) = distinct_single_docid(
+index,
+rtxn,
+distinct_fid,
+docid,
+&mut excluded_documents,
+) {
+return Some(Err(error));
+}
+}
+
+Some(Ok(item))
+})
// start skipping **after** the filter
.skip(from)
// take **after** skipping
.take(length)
{
+let ((docid, (main_score, _sub_score)), source) = res?;
if let ResultSource::Semantic = source {
semantic_hit_count += 1;
}
@@ -136,10 +162,24 @@ impl ScoreWithRatioResult {
document_scores.push(main_score);
}

-(
+// compute the set of candidates from both sets
+let candidates = vector_results.candidates | keyword_results.candidates;
+let must_remove_redundant_candidates = distinct_fid.is_some();
+let candidates = if must_remove_redundant_candidates {
+// patch-up the candidates to remove the indistinct documents, then add back the actual hits
+let mut candidates = candidates - excluded_documents;
+for docid in &documents_ids {
+candidates.insert(*docid);
+}
+candidates
+} else {
+candidates
+};
+
+Ok((
SearchResult {
matching_words: keyword_results.matching_words,
-candidates: vector_results.candidates | keyword_results.candidates,
+candidates,
documents_ids,
document_scores,
degraded: vector_results.degraded | keyword_results.degraded,
@@ -147,7 +187,7 @@ impl ScoreWithRatioResult {
| keyword_results.used_negative_operator,
},
semantic_hit_count,
-)
+))
}
}

@@ -226,8 +266,15 @@ impl Search<'_> {
let keyword_results = ScoreWithRatioResult::new(keyword_results, 1.0 - semantic_ratio);
let vector_results = ScoreWithRatioResult::new(vector_results, semantic_ratio);

-let (merge_results, semantic_hit_count) =
-ScoreWithRatioResult::merge(vector_results, keyword_results, self.offset, self.limit);
+let (merge_results, semantic_hit_count) = ScoreWithRatioResult::merge(
+vector_results,
+keyword_results,
+self.offset,
+self.limit,
+search.distinct.as_deref(),
+search.index,
+search.rtxn,
+)?;
assert!(merge_results.documents_ids.len() <= self.limit);
Ok((merge_results, Some(semantic_hit_count)))
}
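The ordering inside the merged iterator chain matters: deduplication and the distinct rule run before `skip(from)`/`take(length)`, so pagination counts only surviving hits. A toy, std-only illustration of that ordering (hypothetical docids and scores, no distinct lookup):

use std::collections::HashSet;

fn paginate(hits: Vec<(u32, f32)>, from: usize, length: usize) -> Vec<(u32, f32)> {
    let mut seen = HashSet::new();
    hits.into_iter()
        .filter(|(docid, _)| seen.insert(*docid)) // drop duplicates first
        .skip(from) // start skipping **after** the filter
        .take(length) // take **after** skipping
        .collect()
}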
@@ -4,7 +4,9 @@ use super::logger::SearchLogger;
use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait};
use super::SearchContext;
use crate::score_details::{ScoreDetails, ScoringStrategy};
-use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput};
+use crate::search::new::distinct::{
+apply_distinct_rule, distinct_fid, distinct_single_docid, DistinctOutput,
+};
use crate::{Result, TimeBudget};

pub struct BucketSortOutput {
@@ -35,16 +37,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
logger.ranking_rules(&ranking_rules);
logger.initial_universe(universe);

-let distinct_field = match distinct {
-Some(distinct) => Some(distinct),
-None => ctx.index.distinct_field(ctx.txn)?,
-};
-
-let distinct_fid = if let Some(field) = distinct_field {
-ctx.index.fields_ids_map(ctx.txn)?.id(field)
-} else {
-None
-};
+let distinct_fid = distinct_fid(distinct, ctx.index, ctx.txn)?;

if universe.len() < from as u64 {
return Ok(BucketSortOutput {
@@ -9,7 +9,7 @@ use crate::heed_codec::facet::{
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetCodec,
};
use crate::heed_codec::BytesRefCodec;
-use crate::{Index, Result, SearchContext};
+use crate::{FieldId, Index, Result, SearchContext};

pub struct DistinctOutput {
pub remaining: RoaringBitmap,
@@ -121,3 +121,18 @@ pub fn facet_string_values<'a>(
fn facet_values_prefix_key(distinct: u16, id: u32) -> [u8; FID_SIZE + DOCID_SIZE] {
concat_arrays::concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes())
}
+
+pub fn distinct_fid(
+query_distinct_field: Option<&str>,
+index: &Index,
+rtxn: &RoTxn<'_>,
+) -> Result<Option<FieldId>> {
+let distinct_field = match query_distinct_field {
+Some(distinct) => Some(distinct),
+None => index.distinct_field(rtxn)?,
+};
+
+let distinct_fid =
+if let Some(field) = distinct_field { index.fields_ids_map(rtxn)?.id(field) } else { None };
+Ok(distinct_fid)
+}
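A hedged usage sketch of the new helper, mirroring its call sites above: the query-level `distinct` takes precedence, and the index-level setting is the fallback.

// assumes a milli Index and an open read transaction, as at the call sites above
let fid = distinct_fid(search.distinct.as_deref(), search.index, search.rtxn)?;
if let Some(fid) = fid {
    // apply the distinct rule with this field id, e.g. via distinct_single_docid
}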
@@ -28,6 +28,7 @@ use std::time::Duration;
use bucket_sort::{bucket_sort, BucketSortOutput};
use charabia::{Language, TokenizerBuilder};
use db_cache::DatabaseCache;
+pub use distinct::{distinct_fid, distinct_single_docid};
use exact_attribute::ExactAttribute;
use graph_based_ranking_rule::{Exactness, Fid, Position, Proximity, Typo};
use heed::RoTxn;
@@ -47,11 +48,11 @@ use sort::Sort;

use self::distinct::facet_string_values;
use self::geo_sort::GeoSort;
-pub use self::geo_sort::Parameter as GeoSortParameter;
-pub use self::geo_sort::Strategy as GeoSortStrategy;
+pub use self::geo_sort::{Parameter as GeoSortParameter, Strategy as GeoSortStrategy};
use self::graph_based_ranking_rule::Words;
use self::interner::Interned;
use self::vector_sort::VectorSort;
+use crate::attribute_patterns::{match_pattern, PatternMatch};
use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::index::PrefixSearch;
use crate::localized_attributes_rules::LocalizedFieldIds;
@@ -120,17 +121,37 @@ impl<'ctx> SearchContext<'ctx> {
let searchable_fields_weights = self.index.searchable_fields_and_weights(self.txn)?;
let exact_attributes_ids = self.index.exact_attributes_ids(self.txn)?;

-let mut wildcard = false;
+let mut universal_wildcard = false;

let mut restricted_fids = RestrictedFids::default();
for field_name in attributes_to_search_on {
if field_name == "*" {
-wildcard = true;
+universal_wildcard = true;
+// we cannot exit early here because we still want to return an error for unknown fields
+continue;
}
let searchable_weight =
searchable_fields_weights.iter().find(|(name, _, _)| name == field_name);
+
+// The field is not searchable but may contain a wildcard pattern
+if searchable_weight.is_none() && field_name.contains("*") {
+let matching_searchable_weights: Vec<_> = searchable_fields_weights
+.iter()
+.filter(|(name, _, _)| match_pattern(field_name, name) == PatternMatch::Match)
+.collect();
+
+if !matching_searchable_weights.is_empty() {
+for (_name, fid, weight) in matching_searchable_weights {
+if exact_attributes_ids.contains(fid) {
+restricted_fids.exact.push((*fid, *weight));
+} else {
+restricted_fids.tolerant.push((*fid, *weight));
+}
+}
+continue;
+}
+}

let (fid, weight) = match searchable_weight {
// The field id exists and the field is searchable
Some((_name, fid, weight)) => (*fid, *weight),
@@ -160,7 +181,7 @@ impl<'ctx> SearchContext<'ctx> {
};
}

-if wildcard {
+if universal_wildcard {
self.restricted_fids = None;
} else {
self.restricted_fids = Some(restricted_fids);
@@ -92,12 +92,12 @@ fn find_one_typo_derivations(
let mut stream = fst.search_with_state(Intersection(starts, &dfa)).into_stream();

while let Some((derived_word, state)) = stream.next() {
-let derived_word = std::str::from_utf8(derived_word)?;
-let derived_word = ctx.word_interner.insert(derived_word.to_owned());
let d = dfa.distance(state.1);
match d.to_u8() {
0 => (),
1 => {
+let derived_word = std::str::from_utf8(derived_word)?;
+let derived_word = ctx.word_interner.insert(derived_word.to_owned());
let cf = visit(derived_word)?;
if cf.is_break() {
break;

@@ -72,7 +72,7 @@ fn test_2gram_simple() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();

@@ -103,7 +103,7 @@ fn test_3gram_simple() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();

@@ -153,7 +153,7 @@ fn test_no_disable_ngrams() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();

@@ -179,7 +179,7 @@ fn test_2gram_prefix() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();

@@ -208,7 +208,7 @@ fn test_3gram_prefix() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();

@@ -260,7 +260,7 @@ fn test_disable_split_words() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();

@@ -151,7 +151,7 @@ fn test_no_typo() {
let index = create_index();
index
.update_settings(|s| {
-s.set_autorize_typos(false);
+s.set_authorize_typos(false);
})
.unwrap();
@@ -19,10 +19,7 @@ use crate::update::{
};
use crate::vector::settings::{EmbedderSource, EmbeddingSettings};
use crate::vector::EmbeddingConfigs;
-use crate::{
-db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult,
-ThreadPoolNoAbortBuilder,
-};
+use crate::{db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult};

pub(crate) struct TempIndex {
pub inner: Index,
@@ -62,15 +59,8 @@ impl TempIndex {
wtxn: &mut RwTxn<'t>,
documents: Mmap,
) -> Result<(), crate::error::Error> {
-let local_pool;
let indexer_config = &self.indexer_config;
-let pool = match &indexer_config.thread_pool {
-Some(pool) => pool,
-None => {
-local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
-&local_pool
-}
-};
+let pool = &indexer_config.thread_pool;

let rtxn = self.inner.read_txn()?;
let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?;
@@ -153,15 +143,8 @@ impl TempIndex {
wtxn: &mut RwTxn<'t>,
external_document_ids: Vec<String>,
) -> Result<(), crate::error::Error> {
-let local_pool;
let indexer_config = &self.indexer_config;
-let pool = match &indexer_config.thread_pool {
-Some(pool) => pool,
-None => {
-local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
-&local_pool
-}
-};
+let pool = &indexer_config.thread_pool;

let rtxn = self.inner.read_txn()?;
let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?;
@@ -231,15 +214,8 @@ fn aborting_indexation() {
let mut wtxn = index.inner.write_txn().unwrap();
let should_abort = AtomicBool::new(false);

-let local_pool;
let indexer_config = &index.indexer_config;
-let pool = match &indexer_config.thread_pool {
-Some(pool) => pool,
-None => {
-local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
-&local_pool
-}
-};
+let pool = &indexer_config.thread_pool;

let rtxn = index.inner.read_txn().unwrap();
let db_fields_ids_map = index.inner.fields_ids_map(&rtxn).unwrap();
@@ -54,6 +54,10 @@ impl ThreadPoolNoAbortBuilder {
ThreadPoolNoAbortBuilder::default()
}

+pub fn new_for_indexing() -> ThreadPoolNoAbortBuilder {
+ThreadPoolNoAbortBuilder::default().thread_name(|index| format!("indexing-thread:{index}"))
+}
+
pub fn thread_name<F>(mut self, closure: F) -> Self
where
F: FnMut(usize) -> String + 'static,
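A hedged sketch of the new constructor in use; the thread count is a hypothetical value, and without `num_threads` rayon picks its own default:

let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
    .num_threads(4) // hypothetical; threads are named "indexing-thread:<index>"
    .build()
    .expect("failed to build indexing thread pool");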
@@ -1,45 +0,0 @@
-use deserr::Deserr;
-use serde::{Deserialize, Serialize};
-use utoipa::ToSchema;
-
-use crate::index::ChatConfig;
-use crate::prompt::{default_max_bytes, PromptData};
-use crate::update::Setting;
-
-#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
-#[serde(deny_unknown_fields, rename_all = "camelCase")]
-#[deserr(deny_unknown_fields, rename_all = camelCase)]
-pub struct ChatSettings {
-#[serde(default, skip_serializing_if = "Setting::is_not_set")]
-#[deserr(default)]
-#[schema(value_type = Option<String>)]
-pub description: Setting<String>,
-
-/// A liquid template used to render documents to a text that can be embedded.
-///
-/// Meilisearch interpolates the template for each document and sends the resulting text to the embedder.
-/// The embedder then generates document vectors based on this text.
-#[serde(default, skip_serializing_if = "Setting::is_not_set")]
-#[deserr(default)]
-#[schema(value_type = Option<String>)]
-pub document_template: Setting<String>,
-
-/// Rendered texts are truncated to this size. Defaults to 400.
-#[serde(default, skip_serializing_if = "Setting::is_not_set")]
-#[deserr(default)]
-#[schema(value_type = Option<usize>)]
-pub document_template_max_bytes: Setting<usize>,
-}
-
-impl From<ChatConfig> for ChatSettings {
-fn from(config: ChatConfig) -> Self {
-let ChatConfig { description, prompt: PromptData { template, max_bytes } } = config;
-ChatSettings {
-description: Setting::Set(description),
-document_template: Setting::Set(template),
-document_template_max_bytes: Setting::Set(
-max_bytes.unwrap_or(default_max_bytes()).get(),
-),
-}
-}
-}
@@ -33,7 +33,6 @@ use crate::documents::{obkv_to_object, DocumentsBatchReader};
use crate::error::{Error, InternalError};
use crate::index::{PrefixSearch, PrefixSettings};
use crate::progress::Progress;
-use crate::thread_pool_no_abort::ThreadPoolNoAbortBuilder;
pub use crate::update::index_documents::helpers::CursorClonableMmap;
use crate::update::{
IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst,
@@ -228,24 +227,7 @@ where
let possible_embedding_mistakes =
crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution);

-let backup_pool;
-let pool = match self.indexer_config.thread_pool {
-Some(ref pool) => pool,
-None => {
-// We initialize a backup pool with the default
-// settings if none have already been set.
-#[allow(unused_mut)]
-let mut pool_builder = ThreadPoolNoAbortBuilder::new();
-
-#[cfg(test)]
-{
-pool_builder = pool_builder.num_threads(1);
-}
-
-backup_pool = pool_builder.build()?;
-&backup_pool
-}
-};
+let pool = &self.indexer_config.thread_pool;

// create LMDB writer channel
let (lmdb_writer_sx, lmdb_writer_rx): (
@@ -1,7 +1,7 @@
use grenad::CompressionType;

use super::GrenadParameters;
-use crate::thread_pool_no_abort::ThreadPoolNoAbort;
+use crate::{thread_pool_no_abort::ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};

#[derive(Debug)]
pub struct IndexerConfig {
@@ -9,9 +9,10 @@ pub struct IndexerConfig {
pub max_nb_chunks: Option<usize>,
pub documents_chunk_size: Option<usize>,
pub max_memory: Option<usize>,
+pub max_threads: Option<usize>,
pub chunk_compression_type: CompressionType,
pub chunk_compression_level: Option<u32>,
-pub thread_pool: Option<ThreadPoolNoAbort>,
+pub thread_pool: ThreadPoolNoAbort,
pub max_positions_per_attributes: Option<u32>,
pub skip_index_budget: bool,
}
@@ -27,16 +28,39 @@ impl IndexerConfig {
}
}

+/// By default use only 1 thread for indexing in tests
+#[cfg(test)]
+pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) {
+let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
+.num_threads(1)
+.build()
+.expect("failed to build default rayon thread pool");
+
+(pool, Some(1))
+}
+
+#[cfg(not(test))]
+pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) {
+let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
+.build()
+.expect("failed to build default rayon thread pool");
+
+(pool, None)
+}
+
impl Default for IndexerConfig {
fn default() -> Self {
+let (thread_pool, max_threads) = default_thread_pool_and_threads();
+
Self {
+max_threads,
+thread_pool,
log_every_n: None,
max_nb_chunks: None,
documents_chunk_size: None,
max_memory: None,
chunk_compression_type: CompressionType::None,
chunk_compression_level: None,
-thread_pool: None,
max_positions_per_attributes: None,
skip_index_budget: false,
}
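With `thread_pool` no longer optional, `Default` always builds a pool via `default_thread_pool_and_threads()`. A hedged sketch of what a caller observes:

let config = IndexerConfig::default();
// Under cfg(test) the pool is capped at one thread and max_threads is Some(1);
// otherwise max_threads is None and rayon chooses the pool size.
let pool: &ThreadPoolNoAbort = &config.thread_pool;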
@@ -1,11 +1,10 @@
pub use self::available_ids::AvailableIds;
-pub use self::chat::ChatSettings;
pub use self::clear_documents::ClearDocuments;
pub use self::concurrent_available_ids::ConcurrentAvailableIds;
pub use self::facet::bulk::FacetsUpdateBulk;
pub use self::facet::incremental::FacetsUpdateIncrementalInner;
pub use self::index_documents::*;
-pub use self::indexer_config::IndexerConfig;
+pub use self::indexer_config::{default_thread_pool_and_threads, IndexerConfig};
pub use self::new::ChannelCongestion;
pub use self::settings::{validate_embedding_settings, Setting, Settings};
pub use self::update_step::UpdateIndexingStep;
@@ -14,7 +13,6 @@ pub use self::words_prefix_integer_docids::WordPrefixIntegerDocids;
pub use self::words_prefixes_fst::WordsPrefixesFst;

mod available_ids;
-mod chat;
mod clear_documents;
mod concurrent_available_ids;
pub(crate) mod del_add;
@@ -8,7 +8,7 @@ use hashbrown::HashMap;
use serde_json::Value;

use super::super::cache::BalancedCaches;
-use super::facet_document::extract_document_facets;
+use super::facet_document::{extract_document_facets, extract_geo_document};
use super::FacetKind;
use crate::fields_ids_map::metadata::Metadata;
use crate::filterable_attributes_rules::match_faceted_field;
@@ -90,17 +90,12 @@ impl FacetedDocidsExtractor {
let mut cached_sorter = context.data.borrow_mut_or_yield();
let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc);
let docid = document_change.docid();
-let res = match document_change {
-DocumentChange::Deletion(inner) => extract_document_facets(
-inner.current(rtxn, index, context.db_fields_ids_map)?,
-inner.external_document_id(),
-new_fields_ids_map.deref_mut(),
-filterable_attributes,
-sortable_fields,
-asc_desc_fields,
-distinct_field,
-is_geo_enabled,
-&mut |fid, meta, depth, value| {

+// Using a macro avoids borrowing the parameters as mutable in both closures at
+// the same time by postponing their creation
+macro_rules! facet_fn {
+(del) => {
+|fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| {
+Self::facet_fn_with_options(
+&context.doc_alloc,
+cached_sorter.deref_mut(),
@@ -114,91 +109,10 @@ impl FacetedDocidsExtractor {
depth,
value,
)
},
-),
-DocumentChange::Update(inner) => {
-let has_changed = inner.has_changed_for_fields(
-&mut |field_name| {
-match_faceted_field(
-field_name,
-filterable_attributes,
-sortable_fields,
-asc_desc_fields,
-distinct_field,
-)
-},
-rtxn,
-index,
-context.db_fields_ids_map,
-)?;
-let has_changed_for_geo_fields =
-inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?;
-if !has_changed && !has_changed_for_geo_fields {
-return Ok(());
-}
-
-extract_document_facets(
-inner.current(rtxn, index, context.db_fields_ids_map)?,
-inner.external_document_id(),
-new_fields_ids_map.deref_mut(),
-filterable_attributes,
-sortable_fields,
-asc_desc_fields,
-distinct_field,
-is_geo_enabled,
-&mut |fid, meta, depth, value| {
-Self::facet_fn_with_options(
-&context.doc_alloc,
-cached_sorter.deref_mut(),
-BalancedCaches::insert_del_u32,
-&mut del_add_facet_value,
-DelAddFacetValue::insert_del,
-docid,
-fid,
-meta,
-filterable_attributes,
-depth,
-value,
-)
-},
-)?;
-
-extract_document_facets(
-inner.merged(rtxn, index, context.db_fields_ids_map)?,
-inner.external_document_id(),
-new_fields_ids_map.deref_mut(),
-filterable_attributes,
-sortable_fields,
-asc_desc_fields,
-distinct_field,
-is_geo_enabled,
-&mut |fid, meta, depth, value| {
-Self::facet_fn_with_options(
-&context.doc_alloc,
-cached_sorter.deref_mut(),
-BalancedCaches::insert_add_u32,
-&mut del_add_facet_value,
-DelAddFacetValue::insert_add,
-docid,
-fid,
-meta,
-filterable_attributes,
-depth,
-value,
-)
-},
-)
-}
-DocumentChange::Insertion(inner) => extract_document_facets(
-inner.inserted(),
-inner.external_document_id(),
-new_fields_ids_map.deref_mut(),
-filterable_attributes,
-sortable_fields,
-asc_desc_fields,
-distinct_field,
-is_geo_enabled,
-&mut |fid, meta, depth, value| {
+};
+(add) => {
+|fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| {
+Self::facet_fn_with_options(
+&context.doc_alloc,
+cached_sorter.deref_mut(),
@@ -212,12 +126,116 @@ impl FacetedDocidsExtractor {
|
||||
depth,
|
||||
value,
|
||||
)
|
||||
},
|
||||
),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
match document_change {
|
||||
DocumentChange::Deletion(inner) => {
|
||||
let mut del = facet_fn!(del);
|
||||
|
||||
extract_document_facets(
|
||||
inner.current(rtxn, index, context.db_fields_ids_map)?,
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
&mut del,
|
||||
)?;
|
||||
|
||||
if is_geo_enabled {
|
||||
extract_geo_document(
|
||||
inner.current(rtxn, index, context.db_fields_ids_map)?,
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
&mut del,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
DocumentChange::Update(inner) => {
|
||||
let has_changed_for_facets = inner.has_changed_for_fields(
|
||||
&mut |field_name| {
|
||||
match_faceted_field(
|
||||
field_name,
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
)
|
||||
},
|
||||
rtxn,
|
||||
index,
|
||||
context.db_fields_ids_map,
|
||||
)?;
|
||||
|
||||
// 1. Maybe update doc
|
||||
if has_changed_for_facets {
|
||||
extract_document_facets(
|
||||
inner.current(rtxn, index, context.db_fields_ids_map)?,
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
&mut facet_fn!(del),
|
||||
)?;
|
||||
|
||||
extract_document_facets(
|
||||
inner.merged(rtxn, index, context.db_fields_ids_map)?,
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
&mut facet_fn!(add),
|
||||
)?;
|
||||
}
|
||||
|
||||
// 2. Maybe update geo
|
||||
if is_geo_enabled
|
||||
&& inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?
|
||||
{
|
||||
extract_geo_document(
|
||||
inner.current(rtxn, index, context.db_fields_ids_map)?,
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
&mut facet_fn!(del),
|
||||
)?;
|
||||
extract_geo_document(
|
||||
inner.merged(rtxn, index, context.db_fields_ids_map)?,
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
&mut facet_fn!(add),
|
||||
)?;
|
||||
}
|
||||
}
|
||||
DocumentChange::Insertion(inner) => {
|
||||
let mut add = facet_fn!(add);
|
||||
|
||||
extract_document_facets(
|
||||
inner.inserted(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
&mut add,
|
||||
)?;
|
||||
|
||||
if is_geo_enabled {
|
||||
extract_geo_document(
|
||||
inner.inserted(),
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
&mut add,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
del_add_facet_value.send_data(docid, sender, &context.doc_alloc).unwrap();
|
||||
res
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
|
||||
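The `facet_fn!` macro above replaces a pair of eagerly created closures: building both closures up front would borrow `cached_sorter` and `del_add_facet_value` mutably twice at the same time, while expanding the macro at each use site postpones the closure's creation until the branch that needs it. A standalone sketch of the same trick, with a plain `Vec` standing in for the caches (illustrative, not Meilisearch code):

```rust
fn main() {
    let mut sorter: Vec<(bool, u32)> = Vec::new();

    // Each expansion creates a fresh closure at the use site, so only one
    // mutable borrow of `sorter` is ever live at a time.
    macro_rules! push_fn {
        (del) => {
            |n: u32| sorter.push((false, n))
        };
        (add) => {
            |n: u32| sorter.push((true, n))
        };
    }

    let deletion = true; // pretend this comes from a DocumentChange
    if deletion {
        let mut del = push_fn!(del); // the only live closure borrows `sorter`
        del(1);
        del(2);
    } else {
        let mut add = push_fn!(add);
        add(3);
    }

    assert_eq!(sorter, vec![(false, 1), (false, 2)]);
}
```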
@@ -16,13 +16,11 @@ use crate::filterable_attributes_rules::match_faceted_field;
 #[allow(clippy::too_many_arguments)]
 pub fn extract_document_facets<'doc>(
     document: impl Document<'doc>,
-    external_document_id: &str,
     field_id_map: &mut GlobalFieldsIdsMap,
     filterable_attributes: &[FilterableAttributesRule],
     sortable_fields: &HashSet<String>,
     asc_desc_fields: &HashSet<String>,
     distinct_field: &Option<String>,
-    is_geo_enabled: bool,
     facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>,
 ) -> Result<()> {
     // return the match result for the given field name.
@@ -102,17 +100,24 @@ pub fn extract_document_facets<'doc>(
         }
     }
 
-    if is_geo_enabled {
-        if let Some(geo_value) = document.geo_field()? {
-            if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
-                let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map
-                    .id_with_metadata_or_insert("_geo.lat")
-                    .zip(field_id_map.id_with_metadata_or_insert("_geo.lng"))
-                    .ok_or(UserError::AttributeLimitReached)?;
-
-                facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?;
-                facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?;
-            }
-        }
-    }
-
     Ok(())
 }
+
+pub fn extract_geo_document<'doc>(
+    document: impl Document<'doc>,
+    external_document_id: &str,
+    field_id_map: &mut GlobalFieldsIdsMap,
+    facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>,
+) -> Result<()> {
+    if let Some(geo_value) = document.geo_field()? {
+        if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
+            let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map
+                .id_with_metadata_or_insert("_geo.lat")
+                .zip(field_id_map.id_with_metadata_or_insert("_geo.lng"))
+                .ok_or(UserError::AttributeLimitReached)?;
+
+            facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?;
+            facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?;
+        }
+    }
+    Ok(())
+}
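The new `extract_geo_document` keeps the `Option::zip` + `ok_or` idiom for allocating the two geo field ids: both insertions must succeed, and a single `AttributeLimitReached` error is raised if either fails. A self-contained sketch of that idiom, with a hypothetical `insert` helper standing in for `id_with_metadata_or_insert`:

```rust
#[derive(Debug)]
struct AttributeLimitReached;

// Hypothetical stand-in: returns None once the attribute limit is hit.
fn insert(map: &mut Vec<String>, name: &str) -> Option<(usize, String)> {
    if map.len() >= 65_536 {
        return None;
    }
    map.push(name.to_string());
    Some((map.len() - 1, name.to_string()))
}

fn main() -> Result<(), AttributeLimitReached> {
    let mut map = Vec::new();
    // `zip` yields Some only if both insertions succeeded; `ok_or` turns the
    // combined None into one error for the `?` operator.
    let ((lat_fid, _), (lng_fid, _)) = insert(&mut map, "_geo.lat")
        .zip(insert(&mut map, "_geo.lng"))
        .ok_or(AttributeLimitReached)?;
    assert_eq!((lat_fid, lng_fid), (0, 1));
    Ok(())
}
```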
@@ -13,7 +13,7 @@ use time::OffsetDateTime;
 
 use super::del_add::{DelAdd, DelAddOperation};
 use super::index_documents::{IndexDocumentsConfig, Transform};
-use super::{ChatSettings, IndexerConfig};
+use super::IndexerConfig;
 use crate::attribute_patterns::PatternMatch;
 use crate::constants::RESERVED_GEO_FIELD_NAME;
 use crate::criterion::Criterion;
@@ -22,11 +22,11 @@ use crate::error::UserError;
 use crate::fields_ids_map::metadata::{FieldIdMapWithMetadata, MetadataBuilder};
 use crate::filterable_attributes_rules::match_faceted_field;
 use crate::index::{
-    ChatConfig, IndexEmbeddingConfig, PrefixSearch, DEFAULT_MIN_WORD_LEN_ONE_TYPO,
+    IndexEmbeddingConfig, PrefixSearch, DEFAULT_MIN_WORD_LEN_ONE_TYPO,
     DEFAULT_MIN_WORD_LEN_TWO_TYPOS,
 };
 use crate::order_by_map::OrderByMap;
-use crate::prompt::{default_max_bytes, PromptData};
+use crate::prompt::default_max_bytes;
 use crate::proximity::ProximityPrecision;
 use crate::update::index_documents::IndexDocumentsMethod;
 use crate::update::{IndexDocuments, UpdateIndexingStep};
@@ -185,7 +185,6 @@ pub struct Settings<'a, 't, 'i> {
     localized_attributes_rules: Setting<Vec<LocalizedAttributesRule>>,
     prefix_search: Setting<PrefixSearch>,
     facet_search: Setting<bool>,
-    chat: Setting<ChatSettings>,
 }
 
 impl<'a, 't, 'i> Settings<'a, 't, 'i> {
@@ -224,7 +223,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
             localized_attributes_rules: Setting::NotSet,
             prefix_search: Setting::NotSet,
             facet_search: Setting::NotSet,
-            chat: Setting::NotSet,
             indexer_config,
         }
     }
@@ -335,7 +333,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         self.primary_key = Setting::Set(primary_key);
     }
 
-    pub fn set_autorize_typos(&mut self, val: bool) {
+    pub fn set_authorize_typos(&mut self, val: bool) {
         self.authorize_typos = Setting::Set(val);
     }
 
@@ -455,14 +453,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         self.facet_search = Setting::Reset;
     }
 
-    pub fn set_chat(&mut self, value: ChatSettings) {
-        self.chat = Setting::Set(value);
-    }
-
-    pub fn reset_chat(&mut self) {
-        self.chat = Setting::Reset;
-    }
-
     #[tracing::instrument(
         level = "trace",
         skip(self, progress_callback, should_abort, settings_diff),
@@ -1249,45 +1239,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         Ok(())
     }
 
-    fn update_chat_config(&mut self) -> heed::Result<bool> {
-        match &mut self.chat {
-            Setting::Set(ChatSettings {
-                description: new_description,
-                document_template: new_document_template,
-                document_template_max_bytes: new_document_template_max_bytes,
-            }) => {
-                let mut old = self.index.chat_config(self.wtxn)?;
-                let ChatConfig {
-                    ref mut description,
-                    prompt: PromptData { ref mut template, ref mut max_bytes },
-                } = old;
-
-                match new_description {
-                    Setting::Set(d) => *description = d.clone(),
-                    Setting::Reset => *description = Default::default(),
-                    Setting::NotSet => (),
-                }
-
-                match new_document_template {
-                    Setting::Set(dt) => *template = dt.clone(),
-                    Setting::Reset => *template = Default::default(),
-                    Setting::NotSet => (),
-                }
-
-                match new_document_template_max_bytes {
-                    Setting::Set(m) => *max_bytes = NonZeroUsize::new(*m),
-                    Setting::Reset => *max_bytes = Some(default_max_bytes()),
-                    Setting::NotSet => (),
-                }
-
-                self.index.put_chat_config(self.wtxn, &old)?;
-                Ok(true)
-            }
-            Setting::Reset => self.index.delete_chat_config(self.wtxn),
-            Setting::NotSet => Ok(false),
-        }
-    }
-
     pub fn execute<FP, FA>(mut self, progress_callback: FP, should_abort: FA) -> Result<()>
     where
         FP: Fn(UpdateIndexingStep) + Sync,
@@ -1325,7 +1276,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
         self.update_facet_search()?;
         self.update_localized_attributes_rules()?;
         self.update_disabled_typos_terms()?;
-        self.update_chat_config()?;
 
         let embedding_config_updates = self.update_embedding_configs()?;
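The removed `update_chat_config` is a compact illustration of how every `Setting<T>` merge in this file works: `Set` overwrites the stored value, `Reset` restores the default, and `NotSet` leaves the stored value untouched. A minimal sketch of that three-state merge (the enum shape comes from the code above; everything else is illustrative):

```rust
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

fn merge(stored: &mut String, incoming: Setting<String>) {
    match incoming {
        Setting::Set(v) => *stored = v,          // overwrite
        Setting::Reset => *stored = String::default(), // back to default
        Setting::NotSet => (),                   // leave as-is
    }
}

fn main() {
    let mut description = String::from("old");
    merge(&mut description, Setting::NotSet);
    assert_eq!(description, "old");
    merge(&mut description, Setting::Set("new".into()));
    assert_eq!(description, "new");
    merge(&mut description, Setting::Reset);
    assert_eq!(description, "");
}
```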
@@ -792,7 +792,7 @@ fn test_disable_typo() {
 
     index
         .update_settings_using_wtxn(&mut txn, |settings| {
-            settings.set_autorize_typos(false);
+            settings.set_authorize_typos(false);
        })
        .unwrap();
 
@@ -33,7 +33,6 @@ pub struct EmbeddingSettings {
     ///
     /// - Defaults to `openAi`
     pub source: Setting<EmbedderSource>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<String>)]
@@ -56,7 +55,6 @@ pub struct EmbeddingSettings {
     /// - For source `openAi`, defaults to `text-embedding-3-small`
     /// - For source `huggingFace`, defaults to `BAAI/bge-base-en-v1.5`
     pub model: Setting<String>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<String>)]
@@ -77,7 +75,6 @@ pub struct EmbeddingSettings {
     /// - When `model` is set to default, defaults to `617ca489d9e86b49b8167676d8220688b99db36e`
     /// - Otherwise, defaults to `null`
     pub revision: Setting<String>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<OverridePooling>)]
@@ -99,7 +96,6 @@ pub struct EmbeddingSettings {
     ///
     /// - Embedders created before this parameter was available default to `forceMean` to preserve the existing behavior.
     pub pooling: Setting<OverridePooling>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<String>)]
@@ -122,7 +118,6 @@ pub struct EmbeddingSettings {
     ///
     /// - This setting is partially hidden when returned by the settings
     pub api_key: Setting<String>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<String>)]
@@ -146,7 +141,6 @@ pub struct EmbeddingSettings {
     /// - For source `openAi`, the dimensions is the maximum allowed by the model.
     /// - For sources `ollama` and `rest`, the dimensions are inferred by embedding a sample text.
     pub dimensions: Setting<usize>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<bool>)]
@@ -173,7 +167,6 @@ pub struct EmbeddingSettings {
     /// first enabling it. If you are unsure of whether the performance-relevancy tradeoff is right for you,
     /// we recommend to use this parameter on a test index first.
     pub binary_quantized: Setting<bool>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<bool>)]
@@ -190,7 +183,6 @@ pub struct EmbeddingSettings {
     ///
     /// - 🏗️ When modified, embeddings are regenerated for documents whose rendering through the template produces a different text.
     pub document_template: Setting<String>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<usize>)]
@@ -209,7 +201,6 @@ pub struct EmbeddingSettings {
     ///
     /// - Defaults to 400
     pub document_template_max_bytes: Setting<usize>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<String>)]
@@ -228,7 +219,6 @@ pub struct EmbeddingSettings {
     /// - 🌱 When modified for source `openAi`, embeddings are never regenerated
     /// - 🏗️ When modified for sources `ollama` and `rest`, embeddings are always regenerated
     pub url: Setting<String>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<serde_json::Value>)]
@@ -246,7 +236,6 @@ pub struct EmbeddingSettings {
     ///
     /// - 🏗️ Changing the value of this parameter always regenerates embeddings
     pub request: Setting<serde_json::Value>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<serde_json::Value>)]
@@ -264,7 +253,6 @@ pub struct EmbeddingSettings {
     ///
     /// - 🏗️ Changing the value of this parameter always regenerates embeddings
     pub response: Setting<serde_json::Value>,
-
     #[serde(default, skip_serializing_if = "Setting::is_not_set")]
     #[deserr(default)]
     #[schema(value_type = Option<BTreeMap<String, String>>)]
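Each field above pairs `Setting<T>` with `skip_serializing_if = "Setting::is_not_set"`, so parameters a user never touched disappear from the serialized settings object entirely rather than showing up as `null`. A minimal sketch of the same pattern, using `Option` as a stand-in for `Setting` (the real type carries a third `Reset` state):

```rust
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct EmbedderPayload {
    #[serde(skip_serializing_if = "Option::is_none")]
    model: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    dimensions: Option<usize>,
}

fn main() {
    let payload = EmbedderPayload {
        model: Some("text-embedding-3-small".into()),
        dimensions: None,
    };
    // `dimensions` is absent from the output, not serialized as null:
    assert_eq!(
        serde_json::to_string(&payload).unwrap(),
        r#"{"model":"text-embedding-3-small"}"#
    );
}
```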
@@ -1077,13 +1065,14 @@ fn apply_default_for_source(
     }
 }
 
-pub(crate) enum FieldStatus {
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum FieldStatus {
     Mandatory,
     Allowed,
     Disallowed,
 }
 
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, enum_iterator::Sequence)]
 pub enum NestingContext {
     NotNested,
     Search,
@@ -1120,7 +1109,7 @@ impl NestingContext {
     }
 }
 
-#[derive(Debug, Clone, Copy, enum_iterator::Sequence)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, enum_iterator::Sequence)]
 pub enum MetaEmbeddingSetting {
     Source,
     Model,
@@ -1140,8 +1129,14 @@ pub enum MetaEmbeddingSetting {
     BinaryQuantized,
 }
 
+pub enum ReindexOutcome {
+    AlwaysReindex,
+    NeverReindex,
+    ReindexSometimes(&'static str),
+}
+
 impl MetaEmbeddingSetting {
-    pub(crate) fn name(&self) -> &'static str {
+    pub fn name(&self) -> &'static str {
         use MetaEmbeddingSetting::*;
         match self {
             Source => "source",
@@ -1162,6 +1157,159 @@ impl MetaEmbeddingSetting {
             BinaryQuantized => "binaryQuantized",
         }
     }
+
+    pub fn description(&self) -> &'static str {
+        use MetaEmbeddingSetting::*;
+        match self {
+            Source => {
+                r#"
+The source used to provide the embeddings.
+
+Which embedder parameters are available and mandatory is determined by the value of this setting.
+"#
+            }
+            Model => "The name of the model to use.",
+            Revision => {
+                r#"
+The revision (commit SHA1) of the model to use.
+
+If unspecified, Meilisearch picks the latest revision of the model.
+"#
+            }
+            Pooling => "The pooling method to use.",
+            ApiKey => "The API key to pass to the remote embedder while making requests.",
+            Dimensions => "The expected dimensions of the embeddings produced by this embedder.",
+            DocumentTemplate => {
+                r#"
+A liquid template used to render documents to a text that can be embedded.
+
+Meilisearch interpolates the template for each document and sends the resulting text to the embedder.
+The embedder then generates document vectors based on this text.
+"#
+            }
+            DocumentTemplateMaxBytes => {
+                "Rendered texts are truncated to this size before embedding."
+            }
+            Url => "URL to reach the remote embedder.",
+            Request => "Template request to send to the remote embedder.",
+            Response => "Template response indicating how to find the embeddings in the response from the remote embedder.",
+            Headers => "Additional headers to send to the remote embedder.",
+            SearchEmbedder => "Embedder settings for the embedder used at search time.",
+            IndexingEmbedder => "Embedder settings for the embedder used at indexing time.",
+            Distribution => "Affine transformation applied to the semantic score to make it more comparable to the ranking score.",
+            BinaryQuantized => r#"
+Whether to binary quantize the embeddings of this embedder.
+
+Binary quantized embeddings are smaller than regular embeddings, which improves
+disk usage and retrieval speed, at the cost of relevancy.
+"#,
+        }
+    }
+
+    pub fn kind(&self) -> &'static str {
+        use MetaEmbeddingSetting::*;
+        match self {
+            Source => {
+                r#""openAi" | "huggingFace" | "userProvided" | "ollama" | "rest" | "composite""#
+            }
+            Model => "string",
+            Revision => "string",
+            Pooling => r#""useModel" | "forceCls" | "forceMean""#,
+            ApiKey => "string",
+            Dimensions => "number",
+            DocumentTemplate => "string",
+            DocumentTemplateMaxBytes => "number",
+            Url => "string",
+            Request => "any",
+            Response => "any",
+            Headers => "object",
+            SearchEmbedder => "object",
+            IndexingEmbedder => "object",
+            Distribution => "object",
+            BinaryQuantized => "boolean",
+        }
+    }
+
+    pub fn default_value(&self) -> &'static str {
+        use MetaEmbeddingSetting::*;
+        match self {
+            Source => r#""openAi""#,
+            Model => {
+                r#"
+- For source `openAi`, defaults to "text-embedding-3-small"
+- For source `huggingFace`, defaults to "BAAI/bge-base-en-v1.5"
+"#
+            }
+            Revision => {
+                r#"
+- When `model` is set to default, defaults to "617ca489d9e86b49b8167676d8220688b99db36e"
+- Otherwise, defaults to `null`
+"#
+            }
+            Pooling => r#""useModel""#,
+            ApiKey => "`null`",
+            Dimensions => "`null`",
+            DocumentTemplate => crate::prompt::default_template_text(),
+            DocumentTemplateMaxBytes => "400",
+            Url => "`null`",
+            Request => "`null`",
+            Response => "`null`",
+            Headers => "`null`",
+            SearchEmbedder => "`null`",
+            IndexingEmbedder => "`null`",
+            Distribution => "`null`",
+            BinaryQuantized => "`false`",
+        }
+    }
+
+    pub fn reindex_outcome(&self) -> ReindexOutcome {
+        use MetaEmbeddingSetting::*;
+        match self {
+            Source => ReindexOutcome::AlwaysReindex,
+            Model => ReindexOutcome::AlwaysReindex,
+            Revision => ReindexOutcome::AlwaysReindex,
+            Pooling => ReindexOutcome::AlwaysReindex,
+            ApiKey => ReindexOutcome::NeverReindex,
+            Dimensions => ReindexOutcome::ReindexSometimes(
+                r#"
+- 🏗️ When the source is `openAi`, changing the value of this parameter always regenerates embeddings
+- 🌱 For other sources, changing the value of this parameter never regenerates embeddings
+"#,
+            ),
+            DocumentTemplate => ReindexOutcome::ReindexSometimes(
+                r#"
+- 🏗️ When modified, embeddings are regenerated for documents whose rendering through the template produces a different text.
+"#,
+            ),
+            DocumentTemplateMaxBytes => ReindexOutcome::ReindexSometimes(
+                r#"
+- 🏗️ When increased, embeddings are regenerated for documents whose rendering through the template produces a different text.
+- 🌱 When decreased, embeddings are never regenerated
+"#,
+            ),
+            Url => ReindexOutcome::ReindexSometimes(
+                r#"
+- 🌱 When modified for source `openAi`, embeddings are never regenerated
+- 🏗️ When modified for sources `ollama` and `rest`, embeddings are always regenerated
+"#,
+            ),
+            Request => ReindexOutcome::AlwaysReindex,
+            Response => ReindexOutcome::AlwaysReindex,
+            Headers => ReindexOutcome::NeverReindex,
+            SearchEmbedder => ReindexOutcome::NeverReindex,
+            IndexingEmbedder => ReindexOutcome::ReindexSometimes(
+                r#"
+- Embeddings are regenerated when the settings modified in the indexing embedder require regeneration.
+"#,
+            ),
+            Distribution => ReindexOutcome::NeverReindex,
+            BinaryQuantized => ReindexOutcome::ReindexSometimes(
+                r#"
+- Embeddings are not regenerated, but the binary quantization takes time during indexing.
+"#,
+            ),
+        }
+    }
 }
 
 impl EmbeddingSettings {
@@ -1323,7 +1471,7 @@ impl EmbeddingSettings {
     }
 }
 
-pub(crate) fn field_status(
+pub fn field_status(
    source: EmbedderSource,
    field: MetaEmbeddingSetting,
    context: NestingContext,
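The `enum_iterator::Sequence` derive and the new `name`/`description`/`kind`/`default_value`/`reindex_outcome` accessors make it possible to walk every setting and emit one documentation entry per variant, which is presumably how the generated `docs/embedder_settings.md` below is produced. A trimmed-down sketch of such a generator (three variants only; the table shape is an assumption):

```rust
use enum_iterator::{all, Sequence};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Sequence)]
enum MetaEmbeddingSetting {
    Source,
    Model,
    Revision,
}

impl MetaEmbeddingSetting {
    fn name(&self) -> &'static str {
        match self {
            Self::Source => "source",
            Self::Model => "model",
            Self::Revision => "revision",
        }
    }
    fn kind(&self) -> &'static str {
        match self {
            Self::Source => r#""openAi" | "huggingFace" | ..."#,
            Self::Model | Self::Revision => "string",
        }
    }
}

fn main() {
    // `all::<T>()` iterates every variant in declaration order,
    // so the generated docs can never drift out of sync with the enum.
    println!("| Setting | Type |");
    println!("|---------|------|");
    for setting in all::<MetaEmbeddingSetting>() {
        println!("| `{}` | {} |", setting.name(), setting.kind());
    }
}
```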
5 docs/README.md Normal file

@@ -0,0 +1,5 @@
+This directory is meant for auto-generated documentation for internal purposes.
+
+Please refer to <https://meilisearch.com/docs> for the public documentation of Meilisearch.
+
+- [Embedder settings auto-generated description](./embedder_settings.md)

1398 docs/embedder_settings.md Normal file

File diff suppressed because it is too large.