Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-07-18 20:30:47 +00:00
Compare commits
122 Commits
1ef5b1a2a8
5c14a25d5a
fda2843135
9347330f3a
56c9190dab
6b986dceaf
ea6bb4df1d
a3d2f64725
d5526cffff
921e3c4ffe
f80182f0a9
5efc78db55
20049669c9
db28d13cb1
790621dc29
1d577ae98b
88e9a55d44
dbe551cf99
a299fbd33b
193119acb9
4c71118699
5fe2943d3c
86ff502327
6b1a345dce
b54ece690b
3ea167bade
1158d6689f
d9b0463a0b
ae9899f179
308fd7128e
27e7c00622
58207da934
fb8b832192
17207b5405
bd95503eba
8b8b0d802c
d329e86250
d416b3b390
54f5e74744
fd4b192a39
3c13feebf7
1811168b96
b06cc1e0a2
44f812c36d
c8e77b5f25
283f516e15
b4ca0a8c98
b658e38acd
f87e46cc16
65354b414a
025df397c0
f77abc9dc8
7e9909ee45
43ec97fe45
02929e241b
c13efde042
36f0a1492c
ce65ad213b
3e0de6cb83
f3d691667d
ce9c930d10
fc88b003b4
cf5d26124a
38b1c57fa8
25c525b057
83cd28b60b
48cad4132a
4897ad99d0
46ff78b4ec
9ad43b6841
c9ec502ed9
18aed75d3b
6738a4f6ee
d2948adea3
f54b57e5be
95821d0bde
f690fa0686
24e94b28c1
34d58f35c8
1d5265caf4
97aeb6db4d
f888f87635
8c8d98eeaa
c5ae43cac6
57eecd6197
2fe5c78cb6
8047cfe438
5717e5c1af
bb07038c31
d1a088ea0b
b68e22c0e6
03a36f116e
8a0bf24ed5
e2763471e5
b2f2c5d69f
1594c54e23
13b607bd68
3d130d31c8
4cda584b0c
248c90bad5
0e9040e605
3e3c00f44c
d986a3bbaf
c2ceb8e41b
79db2e67fb
865f24cfef
3fbe1df770
150d1db86b
806e983aa5
e96c1d4b0f
15cdc6924b
677e8b122c
75a7e40a27
c8939944c6
4e6252fb03
8bd8e744f3
53f32a7dd7
47a7ed93d3
2ac826edca
89aff2081c
3b773b3416
648b2876f6
Cargo.lock (generated), 2699 lines changed: diff suppressed because it is too large.
@@ -5,7 +5,7 @@ use meilisearch_types::milli::documents::PrimaryKey;
 use meilisearch_types::milli::progress::Progress;
 use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
 use meilisearch_types::milli::update::DocumentAdditionResult;
-use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder};
+use meilisearch_types::milli::{self, ChannelCongestion, Filter};
 use meilisearch_types::settings::apply_settings_to_builder;
 use meilisearch_types::tasks::{Details, KindWithContent, Status, Task};
 use meilisearch_types::Index;
@@ -113,18 +113,21 @@ impl IndexScheduler {
             }
         }

-        let local_pool;
         let indexer_config = self.index_mapper.indexer_config();
-        let pool = match &indexer_config.thread_pool {
-            Some(pool) => pool,
-            None => {
-                local_pool = ThreadPoolNoAbortBuilder::new()
-                    .thread_name(|i| format!("indexing-thread-{i}"))
-                    .build()
-                    .unwrap();
-                &local_pool
-            }
-        };
+        let pool = &indexer_config.thread_pool;
+        let network = self.network();
+        let shards: Vec<&str> = network
+            .local
+            .as_deref()
+            .into_iter()
+            .chain(
+                network
+                    .remotes
+                    .keys()
+                    .map(|s| s.as_str())
+                    .filter(|s| Some(s) != network.local.as_deref().as_ref()),
+            )
+            .collect();

         progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges);
         let (document_changes, operation_stats, primary_key) = indexer
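Side note on the hunk above (not part of the diff): the shard list is built local-first, then every remote whose name differs from the local one. A self-contained Rust sketch of the same iterator chain, with a plain BTreeMap standing in for the scheduler's Network type (hypothetical names; the real code reads them off self.network()):

use std::collections::BTreeMap;

// Stand-ins for the scheduler's network state.
fn shard_list<'a>(local: &'a Option<String>, remotes: &'a BTreeMap<String, ()>) -> Vec<&'a str> {
    local
        .as_deref()
        .into_iter()
        .chain(
            remotes
                .keys()
                .map(|s| s.as_str())
                // Skip the remote entry that names the local node itself.
                .filter(|s| Some(s) != local.as_deref().as_ref()),
        )
        .collect()
}

fn main() {
    let local = Some("myself".to_string());
    let mut remotes = BTreeMap::new();
    remotes.insert("myself".to_string(), ());
    remotes.insert("thy".to_string(), ());
    // Local first, the duplicate of the local name filtered out.
    assert_eq!(shard_list(&local, &remotes), vec!["myself", "thy"]);
}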
@@ -136,6 +139,7 @@ impl IndexScheduler {
                 &mut new_fields_ids_map,
                 &|| must_stop_processing.get(),
                 progress.clone(),
+                &shards,
             )
             .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;

@@ -266,18 +270,8 @@ impl IndexScheduler {

         let mut congestion = None;
         if task.error.is_none() {
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;

             let candidates_count = candidates.len();
             progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges);
@@ -429,18 +423,8 @@ impl IndexScheduler {

         let mut congestion = None;
         if !tasks.iter().all(|res| res.error.is_some()) {
-            let local_pool;
             let indexer_config = self.index_mapper.indexer_config();
-            let pool = match &indexer_config.thread_pool {
-                Some(pool) => pool,
-                None => {
-                    local_pool = ThreadPoolNoAbortBuilder::new()
-                        .thread_name(|i| format!("indexing-thread-{i}"))
-                        .build()
-                        .unwrap();
-                    &local_pool
-                }
-            };
+            let pool = &indexer_config.thread_pool;

             progress.update_progress(DocumentDeletionProgress::DeleteDocuments);
             let mut indexer = indexer::DocumentDeletion::new();
@@ -15,3 +15,5 @@ license.workspace = true
 insta = { version = "=1.39.0", features = ["json", "redactions"] }
 md5 = "0.7.0"
 once_cell = "1.20"
+regex-lite = "0.1.6"
+uuid = { version = "1.17.0", features = ["v4"] }
@@ -4,9 +4,16 @@ use std::path::{Path, PathBuf};
 use std::sync::Mutex;

 pub use insta;
+use insta::internals::{Content, ContentPath};
 use once_cell::sync::Lazy;
+use regex_lite::Regex;

 static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default);
+/// A regex to match UUIDs in messages, specifically looking for the UUID v4 format
+static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| {
+    Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}")
+        .unwrap()
+});

 /// Return the md5 hash of the given string
 pub fn hash_snapshot(snap: &str) -> String {
@@ -26,6 +33,34 @@ pub fn default_snapshot_settings_for_test<'a>(
     let filename = path.file_name().unwrap().to_str().unwrap();
     settings.set_omit_expression(true);

+    fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content {
+        match &content {
+            Content::String(s) => {
+                let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]");
+                Content::String(uuid_replaced.to_string())
+            }
+            _ => content,
+        }
+    }
+
+    settings.add_dynamic_redaction(".message", uuid_in_message_redaction);
+    settings.add_dynamic_redaction(".error.message", uuid_in_message_redaction);
+    settings.add_dynamic_redaction(".indexUid", |content, _content_path| match &content {
+        Content::String(s) => match uuid::Uuid::parse_str(s) {
+            Ok(_) => Content::String("[uuid]".to_owned()),
+            Err(_) => content,
+        },
+        _ => content,
+    });
+
+    settings.add_dynamic_redaction(".error.message", |content, _content_path| match &content {
+        Content::String(s) => {
+            let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "$before[uuid]$after");
+            Content::String(uuid_replaced.to_string())
+        }
+        _ => content,
+    });
+
     let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name);
     let test_name = test_name.rsplit("::").next().unwrap().to_owned();

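Illustration (not from the PR itself): the redaction machinery above reduces to a regex replace_all over snapshot message fields. A minimal standalone sketch using the same regex-lite pattern:

use regex_lite::Regex;

fn main() {
    // Same v4-shaped pattern as UUID_IN_MESSAGE_RE in the diff.
    let re = Regex::new(
        r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}",
    )
    .unwrap();
    let msg = "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.";
    // Every UUID collapses to a stable placeholder, so snapshots stop
    // churning when tests generate fresh identifiers on each run.
    assert_eq!(
        re.replace_all(msg, "[uuid]"),
        "`uid` field value `[uuid]` is already an existing API key."
    );
}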
@@ -232,6 +267,9 @@ macro_rules! json_string {
 #[cfg(test)]
 mod tests {
     use crate as meili_snap;
+    use crate::UUID_IN_MESSAGE_RE;
+    use uuid::Uuid;
+
     #[test]
     fn snap() {
         snapshot_hash!(10, @"d3d9446802a44259755d38e6d163e820");
@@ -279,4 +317,14 @@ mod tests {

         // snapshot_hash!("", name: "", @"d41d8cd98f00b204e9800998ecf8427e");
     }
+
+    #[test]
+    fn uuid_in_message_regex() {
+        let uuid1 = Uuid::new_v4();
+        let uuid2 = Uuid::new_v4();
+        let uuid3 = Uuid::new_v4();
+        let to_replace = format!("1 {uuid1} 2 {uuid2} 3 {uuid3} 4");
+        let replaced = UUID_IN_MESSAGE_RE.replace_all(to_replace.as_str(), "[uuid]");
+        assert_eq!(replaced, "1 [uuid] 2 [uuid] 3 [uuid] 4");
+    }
 }
@@ -666,7 +666,7 @@ pub fn apply_settings_to_builder(
     match typo_tolerance {
         Setting::Set(ref value) => {
             match value.enabled {
-                Setting::Set(val) => builder.set_autorize_typos(val),
+                Setting::Set(val) => builder.set_authorize_typos(val),
                 Setting::Reset => builder.reset_authorize_typos(),
                 Setting::NotSet => (),
             }
@@ -116,7 +116,7 @@ utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
 actix-rt = "2.10.0"
 brotli = "6.0.0"
 # fixed version due to format breakages in v1.40
-insta = "=1.39.0"
+insta = { version = "=1.39.0", features = ["redactions"] }
 manifest-dir-macros = "0.1.18"
 maplit = "1.0.2"
 meili-snap = { path = "../meili-snap" }
@@ -37,7 +37,9 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
 use meilisearch_auth::{open_auth_store_env, AuthController};
 use meilisearch_types::milli::constants::VERSION_MAJOR;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
-use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
+use meilisearch_types::milli::update::{
+    default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
+};
 use meilisearch_types::settings::apply_settings_to_builder;
 use meilisearch_types::tasks::KindWithContent;
 use meilisearch_types::versioning::{
@@ -500,7 +502,19 @@ fn import_dump(
     let network = dump_reader.network()?.cloned().unwrap_or_default();
     index_scheduler.put_network(network)?;

-    let indexer_config = index_scheduler.indexer_config();
+    // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
+    let backup_config;
+    let base_config = index_scheduler.indexer_config();
+
+    let indexer_config = if base_config.max_threads.is_none() {
+        let (thread_pool, _) = default_thread_pool_and_threads();
+
+        let _config = IndexerConfig { thread_pool, ..*base_config };
+        backup_config = _config;
+        &backup_config
+    } else {
+        base_config
+    };

     // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
     // try to process tasks while we're trying to import the indexes.
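Aside (a sketch with a hypothetical Config type, not the real IndexerConfig): the backup_config binding above is the standard trick for exposing either a borrowed config or a locally built fallback through one reference, by declaring the owned value before the branch so the chosen reference can outlive the if:

// Hypothetical minimal config type for illustration.
#[derive(Debug)]
struct Config {
    max_threads: Option<usize>,
}

fn main() {
    let base = Config { max_threads: None };

    // The owned fallback is declared before the branch so the reference
    // chosen below may borrow from it for the rest of the scope.
    let fallback;
    let config: &Config = if base.max_threads.is_none() {
        fallback = Config { max_threads: Some(4) }; // built only when needed
        &fallback
    } else {
        &base
    };

    println!("effective config: {config:?}");
}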
@@ -746,10 +746,12 @@ impl IndexerOpts {
                 max_indexing_memory.to_string(),
             );
         }
-        export_to_env_if_not_present(
-            MEILI_MAX_INDEXING_THREADS,
-            max_indexing_threads.0.to_string(),
-        );
+        if let Some(max_indexing_threads) = max_indexing_threads.0 {
+            export_to_env_if_not_present(
+                MEILI_MAX_INDEXING_THREADS,
+                max_indexing_threads.to_string(),
+            );
+        }
     }
 }

@@ -757,15 +759,15 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
     type Error = anyhow::Error;

     fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
-        let thread_pool = ThreadPoolNoAbortBuilder::new()
-            .thread_name(|index| format!("indexing-thread:{index}"))
-            .num_threads(*other.max_indexing_threads)
+        let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing()
+            .num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2))
             .build()?;

         Ok(Self {
-            thread_pool,
-            log_every_n: Some(DEFAULT_LOG_EVERY_N),
             max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
+            thread_pool: Some(thread_pool),
+            max_threads: *other.max_indexing_threads,
             max_positions_per_attributes: None,
             skip_index_budget: other.skip_index_budget,
             ..Default::default()
@@ -828,31 +830,31 @@ fn total_memory_bytes() -> Option<u64> {
     }
 }

-#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
-pub struct MaxThreads(usize);
+#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
+pub struct MaxThreads(Option<usize>);

 impl FromStr for MaxThreads {
     type Err = ParseIntError;

-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        usize::from_str(s).map(Self)
-    }
-}
-
-impl Default for MaxThreads {
-    fn default() -> Self {
-        MaxThreads(num_cpus::get() / 2)
+    fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
+        if s.is_empty() || s == "unlimited" {
+            return Ok(MaxThreads::default());
+        }
+        usize::from_str(s).map(Some).map(MaxThreads)
     }
 }

 impl fmt::Display for MaxThreads {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", self.0)
+        match self.0 {
+            Some(threads) => write!(f, "{}", threads),
+            None => write!(f, "unlimited"),
+        }
     }
 }

 impl Deref for MaxThreads {
-    type Target = usize;
+    type Target = Option<usize>;

     fn deref(&self) -> &Self::Target {
         &self.0
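To pin down the new parsing contract, a self-contained sketch that restates the MaxThreads type from the hunk above (rather than importing the real crate): empty and "unlimited" now mean "no cap" instead of defaulting to half the cores.

use std::num::ParseIntError;
use std::str::FromStr;

// Re-stated from the diff for a self-contained example.
#[derive(Default, Debug, Clone, Copy, PartialEq)]
struct MaxThreads(Option<usize>);

impl FromStr for MaxThreads {
    type Err = ParseIntError;

    fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
        if s.is_empty() || s == "unlimited" {
            return Ok(MaxThreads::default());
        }
        usize::from_str(s).map(Some).map(MaxThreads)
    }
}

fn main() {
    assert_eq!("".parse::<MaxThreads>().unwrap(), MaxThreads(None));
    assert_eq!("unlimited".parse::<MaxThreads>().unwrap(), MaxThreads(None));
    assert_eq!("8".parse::<MaxThreads>().unwrap(), MaxThreads(Some(8)));
    // Anything else must still be a valid integer.
    assert!("eight".parse::<MaxThreads>().is_err());
}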
@@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() {
     let (response, code) = server.add_api_key(content).await;
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.",
+      "message": "`uid` field value `[uuid]` is already an existing API key.",
       "code": "api_key_already_exists",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#api_key_already_exists"
@@ -29,6 +29,10 @@ impl<'a> Index<'a, Owned> {
         }
     }

+    pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> {
+        Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData }
+    }
+
     pub async fn load_test_set(&self) -> u64 {
         let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
         let (response, code) = self
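Aside (hypothetical types for illustration): with_encoder is the usual copy-with-one-field-swapped constructor; only the uid needs cloning. A reduced sketch with just a uid and an encoder field:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Encoder {
    Gzip,
    Brotli,
}

#[derive(Clone, Debug)]
struct Index {
    uid: String,
    encoder: Encoder,
}

impl Index {
    // Same shape as the helper in the diff: clone the identity, swap the codec.
    fn with_encoder(&self, encoder: Encoder) -> Index {
        Index { uid: self.uid.clone(), encoder }
    }
}

fn main() {
    let index = Index { uid: "test".into(), encoder: Encoder::Gzip };
    let brotli = index.with_encoder(Encoder::Brotli);
    assert_eq!(brotli.uid, index.uid);
    assert_eq!(brotli.encoder, Encoder::Brotli);
}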
@@ -290,6 +294,20 @@ impl Index<'_, Shared> {
         }
         (task, code)
     }
+
+    pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
+        let (mut task, code) = self._update(primary_key).await;
+        if code.is_success() {
+            task = self.wait_task(task.uid()).await;
+            if task.is_success() {
+                panic!(
+                    "`update_index_fail` succeeded: {}",
+                    serde_json::to_string_pretty(&task).unwrap()
+                );
+            }
+        }
+        (task, code)
+    }
 }

 #[allow(dead_code)]
@@ -333,6 +351,14 @@ impl<State> Index<'_, State> {
         self.service.post_encoded("/indexes", body, self.encoder).await
     }

+    pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
+        let body = json!({
+            "primaryKey": primary_key,
+        });
+        let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
+        self.service.patch_encoded(url, body, self.encoder).await
+    }
+
     pub(super) async fn _delete(&self) -> (Value, StatusCode) {
         let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
         self.service.delete(url).await
@@ -264,6 +264,24 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     ])
 });

+pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> {
+    static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
+    INDEX.get_or_init(|| async {
+        let server = Server::new_shared();
+        let index = server._index("SCORE_DOCUMENTS").to_shared();
+        let documents = SCORE_DOCUMENTS.clone();
+        let (response, _code) = index._add_documents(documents, None).await;
+        index.wait_task(response.uid()).await.succeeded();
+        let (response, _code) = index
+            ._update_settings(
+                json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
+            )
+            .await;
+        index.wait_task(response.uid()).await.succeeded();
+        index
+    }).await
+}
+
 pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     json!([
         {
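Background on the pattern above (a sketch, assuming the tokio runtime these tests already use): OnceCell::const_new plus get_or_init runs the async setup exactly once and hands every caller the same 'static reference, which is what makes a shared index fixture safe across parallel tests.

use tokio::sync::OnceCell;

// A stand-in for the shared index fixture.
async fn expensive_setup() -> String {
    "SCORE_DOCUMENTS".to_owned()
}

async fn shared_fixture() -> &'static String {
    static FIXTURE: OnceCell<String> = OnceCell::const_new();
    // First caller runs the setup; everyone else awaits the cached value.
    FIXTURE.get_or_init(expensive_setup).await
}

#[tokio::main]
async fn main() {
    let a = shared_fixture().await;
    let b = shared_fixture().await;
    // Both calls observe the same initialised instance.
    assert!(std::ptr::eq(a, b));
}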
File diff suppressed because it is too large.
@@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() {

 #[actix_rt::test]
 async fn get_document_with_vectors() {
-    let server = Server::new().await;
-    let index = server.index("doggo");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let (response, code) = index
         .update_settings(json!({
@@ -28,6 +28,7 @@ async fn error_delete_unexisting_index() {
     let (task, code) = index.delete_index_fail().await;

+    assert_eq!(code, 202);
     index.wait_task(task.uid()).await.failed();

     let expected_response = json!({
         "message": "Index `DOES_NOT_EXISTS` not found.",
@@ -57,7 +58,7 @@ async fn loop_delete_add_documents() {
     }

     for task in tasks {
-        let response = index.wait_task(task).await;
+        let response = index.wait_task(task).await.succeeded();
         assert_eq!(response["status"], "succeeded", "{}", response);
     }
 }
@@ -52,19 +52,28 @@ async fn no_index_return_empty_list() {

 #[actix_rt::test]
 async fn list_multiple_indexes() {
-    let server = Server::new().await;
-    server.index("test").create(None).await;
-    let (task, _status_code) = server.index("test1").create(Some("key")).await;
+    let server = Server::new_shared();

-    server.index("test").wait_task(task.uid()).await.succeeded();
+    let index_without_key = server.unique_index();
+    let (response_without_key, _status_code) = index_without_key.create(None).await;

-    let (response, code) = server.list_indexes(None, None).await;
+    let index_with_key = server.unique_index();
+    let (response_with_key, _status_code) = index_with_key.create(Some("key")).await;
+
+    index_without_key.wait_task(response_without_key.uid()).await.succeeded();
+    index_with_key.wait_task(response_with_key.uid()).await.succeeded();
+
+    let (response, code) = server.list_indexes(None, Some(1000)).await;
     assert_eq!(code, 200);
     assert!(response["results"].is_array());
     let arr = response["results"].as_array().unwrap();
-    assert_eq!(arr.len(), 2);
-    assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
-    assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
+    assert!(arr.len() >= 2, "Expected at least 2 indexes.");
+    assert!(arr
+        .iter()
+        .any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null));
+    assert!(arr
+        .iter()
+        .any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key"));
 }

 #[actix_rt::test]
@@ -1,10 +1,11 @@
-use crate::common::Server;
+use crate::common::{shared_does_not_exists_index, Server};

 use crate::json;

 #[actix_rt::test]
 async fn stats() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task, code) = index.create(Some("id")).await;

     assert_eq!(code, 202);
@@ -15,7 +16,7 @@ async fn stats() {

     assert_eq!(code, 200);
     assert_eq!(response["numberOfDocuments"], 0);
-    assert!(response["isIndexing"] == false);
+    assert_eq!(response["isIndexing"], false);
     assert!(response["fieldDistribution"].as_object().unwrap().is_empty());

     let documents = json!([
@@ -31,7 +32,6 @@ async fn stats() {

     let (response, code) = index.add_documents(documents, None).await;
     assert_eq!(code, 202);
-    assert_eq!(response["taskUid"], 1);

     index.wait_task(response.uid()).await.succeeded();

@@ -39,7 +39,7 @@ async fn stats() {

     assert_eq!(code, 200);
     assert_eq!(response["numberOfDocuments"], 2);
-    assert!(response["isIndexing"] == false);
+    assert_eq!(response["isIndexing"], false);
     assert_eq!(response["fieldDistribution"]["id"], 2);
     assert_eq!(response["fieldDistribution"]["name"], 1);
     assert_eq!(response["fieldDistribution"]["age"], 1);
@@ -47,11 +47,11 @@ async fn stats() {

 #[actix_rt::test]
 async fn error_get_stats_unexisting_index() {
-    let server = Server::new().await;
-    let (response, code) = server.index("test").stats().await;
+    let index = shared_does_not_exists_index().await;
+    let (response, code) = index.stats().await;

     let expected_response = json!({
-        "message": "Index `test` not found.",
+        "message": format!("Index `{}` not found.", index.uid),
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -2,28 +2,26 @@ use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;

 use crate::common::encoder::Encoder;
-use crate::common::Server;
+use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server};
 use crate::json;

 #[actix_rt::test]
 async fn update_primary_key() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (_, code) = index.create(None).await;
+    let server = Server::new_shared();
+    let index = server.unique_index();
+    let (task, code) = index.create(None).await;

     assert_eq!(code, 202);
+    index.wait_task(task.uid()).await.succeeded();

     let (task, _status_code) = index.update(Some("primary")).await;
-
-    let response = index.wait_task(task.uid()).await;
-
-    assert_eq!(response["status"], "succeeded");
+    index.wait_task(task.uid()).await.succeeded();

     let (response, code) = index.get().await;

     assert_eq!(code, 200);

-    assert_eq!(response["uid"], "test");
+    assert_eq!(response["uid"], index.uid);
     assert!(response.get("createdAt").is_some());
     assert!(response.get("updatedAt").is_some());
@@ -39,24 +37,23 @@ async fn update_primary_key() {

 #[actix_rt::test]
 async fn create_and_update_with_different_encoding() {
-    let server = Server::new().await;
-    let index = server.index_with_encoder("test", Encoder::Gzip);
-    let (_, code) = index.create(None).await;
+    let server = Server::new_shared();
+    let index = server.unique_index_with_encoder(Encoder::Gzip);
+    let (create_task, code) = index.create(None).await;

     assert_eq!(code, 202);
+    index.wait_task(create_task.uid()).await.succeeded();

-    let index = server.index_with_encoder("test", Encoder::Brotli);
+    let index = index.with_encoder(Encoder::Brotli);
     let (task, _status_code) = index.update(Some("primary")).await;

-    let response = index.wait_task(task.uid()).await;
-
-    assert_eq!(response["status"], "succeeded");
+    index.wait_task(task.uid()).await.succeeded();
 }

 #[actix_rt::test]
 async fn update_nothing() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (task1, code) = index.create(None).await;

     assert_eq!(code, 202);
@@ -67,35 +64,20 @@ async fn update_nothing() {

     assert_eq!(code, 202);

-    let response = index.wait_task(task2.uid()).await;
-
-    assert_eq!(response["status"], "succeeded");
+    index.wait_task(task2.uid()).await.succeeded();
 }

 #[actix_rt::test]
 async fn error_update_existing_primary_key() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (_response, code) = index.create(Some("id")).await;
+    let index = shared_index_with_documents().await;

-    assert_eq!(code, 202);
-
-    let documents = json!([
-        {
-            "id": "11",
-            "content": "foobar"
-        }
-    ]);
-    index.add_documents(documents, None).await;
-
-    let (task, code) = index.update(Some("primary")).await;
+    let (update_task, code) = index.update_index_fail(Some("primary")).await;

     assert_eq!(code, 202);

-    let response = index.wait_task(task.uid()).await;
+    let response = index.wait_task(update_task.uid()).await.failed();

     let expected_response = json!({
-        "message": "Index `test`: Index already has a primary key: `id`.",
+        "message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
         "code": "index_primary_key_already_exists",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
@@ -106,15 +88,15 @@ async fn error_update_existing_primary_key() {

 #[actix_rt::test]
 async fn error_update_unexisting_index() {
-    let server = Server::new().await;
-    let (task, code) = server.index("test").update(None).await;
+    let index = shared_does_not_exists_index().await;
+    let (task, code) = index.update_index_fail(Some("my-primary-key")).await;

     assert_eq!(code, 202);

-    let response = server.index("test").wait_task(task.uid()).await;
+    let response = index.wait_task(task.uid()).await.failed();

     let expected_response = json!({
-        "message": "Index `test` not found.",
+        "message": format!("Index `{}` not found.", index.uid),
         "code": "index_not_found",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -623,3 +623,88 @@ async fn get_and_set_network() {
     }
     "###);
 }
+
+#[actix_rt::test]
+async fn index_with_shards() {
+    let server = Server::new().await;
+
+    let (response, code) = server.set_features(json!({"network": true})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response["network"]), @r#"true"#);
+
+    // adding self
+    let (response, code) = server.set_network(json!({"self": "myself"})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "self": "myself",
+      "remotes": {}
+    }
+    "###);
+
+    // adding remotes
+    let (response, code) = server
+        .set_network(json!({
+            "self": "myself",
+            "remotes": {
+                "myself": {
+                    "url": "http://localhost:7700"
+                },
+                "thy": {
+                    "url": "http://localhost:7701",
+                    "searchApiKey": "foo"
+                }
+            }}))
+        .await;
+
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "self": "myself",
+      "remotes": {
+        "myself": {
+          "url": "http://localhost:7700",
+          "searchApiKey": null
+        },
+        "thy": {
+          "url": "http://localhost:7701",
+          "searchApiKey": "foo"
+        }
+      }
+    }
+    "###);
+
+    // adding one remote
+    let (response, code) = server
+        .set_network(json!({"remotes": {
+            "them": {
+                "url": "http://localhost:7702",
+                "searchApiKey": "baz"
+            }
+        }}))
+        .await;
+
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "self": "myself",
+      "remotes": {
+        "myself": {
+          "url": "http://localhost:7700",
+          "searchApiKey": null
+        },
+        "them": {
+          "url": "http://localhost:7702",
+          "searchApiKey": "baz"
+        },
+        "thy": {
+          "url": "http://localhost:7701",
+          "searchApiKey": "bar"
+        }
+      }
+    }
+    "###);
+}
@@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str = "product_id";
 /// testing: https://github.com/meilisearch/meilisearch/issues/4078
 #[actix_rt::test]
 async fn distinct_search_with_offset_no_ranking() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"2");
-    snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
     snapshot!(response["estimatedTotalHits"] , @"11");

     let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await;
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"2");
-    snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
     snapshot!(response["estimatedTotalHits"], @"10");

     let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await;
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"2");
-    snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
     snapshot!(response["estimatedTotalHits"], @"6");

     let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await;
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"1");
-    snapshot!(format!("{:?}", hits), @r#"["345678"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["345678"]"#);
     snapshot!(response["estimatedTotalHits"], @"6");

     let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await;
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"0");
-    snapshot!(format!("{:?}", hits), @r#"[]"#);
+    snapshot!(format!("{hits:?}"), @r#"[]"#);
     snapshot!(response["estimatedTotalHits"], @"6");

     let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await;
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"0");
-    snapshot!(format!("{:?}", hits), @r#"[]"#);
+    snapshot!(format!("{hits:?}"), @r#"[]"#);
     snapshot!(response["estimatedTotalHits"], @"6");
 }

 /// testing: https://github.com/meilisearch/meilisearch/issues/4130
 #[actix_rt::test]
 async fn distinct_search_with_pagination_no_ranking() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"0");
-    snapshot!(format!("{:?}", hits), @r#"[]"#);
+    snapshot!(format!("{hits:?}"), @r#"[]"#);
     snapshot!(response["page"], @"0");
     snapshot!(response["totalPages"], @"3");
     snapshot!(response["totalHits"], @"6");
@@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"2");
-    snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
     snapshot!(response["page"], @"1");
     snapshot!(response["totalPages"], @"3");
     snapshot!(response["totalHits"], @"6");
@@ -240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"2");
-    snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
     snapshot!(response["page"], @"2");
     snapshot!(response["totalPages"], @"3");
     snapshot!(response["totalHits"], @"6");
@@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"2");
-    snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
     snapshot!(response["page"], @"3");
     snapshot!(response["totalPages"], @"3");
     snapshot!(response["totalHits"], @"6");
@@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"0");
-    snapshot!(format!("{:?}", hits), @r#"[]"#);
+    snapshot!(format!("{hits:?}"), @r#"[]"#);
     snapshot!(response["page"], @"4");
     snapshot!(response["totalPages"], @"3");
     snapshot!(response["totalHits"], @"6");
@@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"3");
-    snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#);
+    snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#);
     snapshot!(response["page"], @"2");
     snapshot!(response["totalPages"], @"2");
     snapshot!(response["totalHits"], @"6");
@@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() {

 #[actix_rt::test]
 async fn distinct_at_search_time() {
-    let server = Server::new().await;
-    let index = server.index("tamo");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = NESTED_DOCUMENTS.clone();
     index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
     let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
-    let task = index.wait_task(task.uid()).await;
+    let task = index.wait_task(task.uid()).await.succeeded();
     snapshot!(task, name: "succeed");

     fn get_hits(response: &Value) -> Vec<String> {
@@ -299,7 +299,7 @@ async fn distinct_at_search_time() {
     let hits = get_hits(&response);
     snapshot!(code, @"200 OK");
     snapshot!(hits.len(), @"3");
-    snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
+    snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###);
     snapshot!(response["page"], @"1");
     snapshot!(response["totalPages"], @"1");
     snapshot!(response["totalHits"], @"3");
@@ -708,7 +708,7 @@ async fn filter_invalid_attribute_array() {
         |response, code| {
             snapshot!(response, @r###"
             {
-              "message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
+              "message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
               "code": "invalid_search_filter",
               "type": "invalid_request",
               "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -729,7 +729,7 @@ async fn filter_invalid_attribute_string() {
         |response, code| {
             snapshot!(response, @r###"
             {
-              "message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
+              "message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
               "code": "invalid_search_filter",
               "type": "invalid_request",
               "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -886,7 +886,7 @@ async fn search_with_pattern_filter_settings_errors() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r#"
     {
-      "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -912,7 +912,7 @@ async fn search_with_pattern_filter_settings_errors() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r#"
     {
-      "message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -933,7 +933,7 @@ async fn search_with_pattern_filter_settings_errors() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r#"
     {
-      "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -959,7 +959,7 @@ async fn search_with_pattern_filter_settings_errors() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r#"
     {
-      "message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -985,7 +985,7 @@ async fn search_with_pattern_filter_settings_errors() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r#"
     {
-      "message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
+      "message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
       "code": "invalid_search_filter",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -1144,7 +1144,7 @@ async fn search_on_unknown_field() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(response, @r###"
     {
-      "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
+      "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
       "code": "invalid_search_attributes_to_search_on",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@@ -1165,7 +1165,7 @@ async fn search_on_unknown_field_plus_joker() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(response, @r###"
     {
-      "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
+      "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
       "code": "invalid_search_attributes_to_search_on",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@@ -1183,7 +1183,7 @@ async fn search_on_unknown_field_plus_joker() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(response, @r###"
     {
-      "message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
+      "message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
       "code": "invalid_search_attributes_to_search_on",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@@ -1196,10 +1196,8 @@ async fn search_on_unknown_field_plus_joker() {

 #[actix_rt::test]
 async fn distinct_at_search_time() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    let (task, _) = index.create(None).await;
-    index.wait_task(task.uid()).await.succeeded();
+    let server = Server::new_shared();
+    let index = server.unique_index();
     let (response, _code) =
         index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
     index.wait_task(response.uid()).await.succeeded();
@@ -1209,7 +1207,7 @@ async fn distinct_at_search_time() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(response, @r###"
     {
-      "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
+      "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
       "code": "invalid_search_distinct",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@@ -1224,7 +1222,7 @@ async fn distinct_at_search_time() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(response, @r###"
     {
-      "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
+      "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
       "code": "invalid_search_distinct",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@@ -1239,7 +1237,7 @@ async fn distinct_at_search_time() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(response, @r###"
     {
-      "message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
+      "message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
       "code": "invalid_search_distinct",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@@ -50,13 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

     let (task, code) = index.add_documents(documents.clone(), None).await;
     assert_eq!(code, 202, "{}", task);
-    let response = index.wait_task(task.uid()).await;
-    assert!(response.is_success(), "{:?}", response);
+    index.wait_task(task.uid()).await.succeeded();

     let (task, code) = index.update_settings(settings.clone()).await;
     assert_eq!(code, 202, "{}", task);
-    let response = index.wait_task(task.uid()).await;
-    assert!(response.is_success(), "{:?}", response);
+    index.wait_task(task.uid()).await.succeeded();

     let (response, code) = index.facet_search(query.clone()).await;
     insta::allow_duplicates! {
@@ -65,21 +63,18 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

     let (task, code) = server.delete_index("test").await;
     assert_eq!(code, 202, "{}", task);
-    let response = server.wait_task(task.uid()).await;
-    assert!(response.is_success(), "{:?}", response);
+    server.wait_task(task.uid()).await.succeeded();

     eprintln!("Settings -> Documents -> test");
     let index = server.index("test");

     let (task, code) = index.update_settings(settings.clone()).await;
     assert_eq!(code, 202, "{}", task);
-    let response = index.wait_task(task.uid()).await;
-    assert!(response.is_success(), "{:?}", response);
+    index.wait_task(task.uid()).await.succeeded();

     let (task, code) = index.add_documents(documents.clone(), None).await;
     assert_eq!(code, 202, "{}", task);
-    let response = index.wait_task(task.uid()).await;
-    assert!(response.is_success(), "{:?}", response);
+    index.wait_task(task.uid()).await.succeeded();

     let (response, code) = index.facet_search(query.clone()).await;
     insta::allow_duplicates! {
@@ -88,14 +83,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(

     let (task, code) = server.delete_index("test").await;
     assert_eq!(code, 202, "{}", task);
-    let response = server.wait_task(task.uid()).await;
-    assert!(response.is_success(), "{:?}", response);
+    server.wait_task(task.uid()).await.succeeded();
 }

 #[actix_rt::test]
 async fn simple_facet_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -105,20 +99,20 @@ async fn simple_facet_search() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
+    assert_eq!(code, 200, "{response}");
+    assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);

     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await;

-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
 }

 #[actix_rt::test]
 async fn simple_facet_search_on_movies() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = json!([
         {
@@ -212,23 +206,23 @@ async fn simple_facet_search_on_movies() {
     ]);
     let (response, code) =
         index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
-    assert_eq!(202, code, "{:?}", response);
-    index.wait_task(response.uid()).await;
+    assert_eq!(202, code, "{response:?}");
+    index.wait_task(response.uid()).await.succeeded();

     let (response, _code) = index.add_documents(documents, None).await;
-    index.wait_task(response.uid()).await;
+    index.wait_task(response.uid()).await.succeeded();

     let (response, code) =
         index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;

-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     snapshot!(response["facetHits"], @r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###);
 }

 #[actix_rt::test]
 async fn advanced_facet_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -251,8 +245,8 @@ async fn advanced_facet_search() {

 #[actix_rt::test]
 async fn more_advanced_facet_search() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -275,8 +269,8 @@ async fn more_advanced_facet_search() {

 #[actix_rt::test]
 async fn simple_facet_search_with_max_values() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
@@ -287,14 +281,14 @@ async fn simple_facet_search_with_max_values() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
+    assert_eq!(code, 200, "{response}");
+    assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
 }

 #[actix_rt::test]
 async fn simple_facet_search_by_count_with_max_values() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index
@@ -309,14 +303,14 @@ async fn simple_facet_search_by_count_with_max_values() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
+    assert_eq!(code, 200, "{response}");
+    assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
 }

 #[actix_rt::test]
 async fn non_filterable_facet_search_error() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     let (task, _status_code) = index.add_documents(documents, None).await;
@@ -324,17 +318,17 @@ async fn non_filterable_facet_search_error() {

     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400, "{response}");

     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await;
-    assert_eq!(code, 400, "{}", response);
+    assert_eq!(code, 400, "{response}");
 }

 #[actix_rt::test]
 async fn facet_search_dont_support_words() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.update_settings_filterable_attributes(json!(["genres"])).await;
@@ -344,14 +338,14 @@ async fn facet_search_dont_support_words() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;

-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     assert_eq!(response["facetHits"].as_array().unwrap().len(), 0);
 }

 #[actix_rt::test]
 async fn simple_facet_search_with_sort_by_count() {
-    let server = Server::new().await;
-    let index = server.index("test");
+    let server = Server::new_shared();
+    let index = server.unique_index();

     let documents = DOCUMENTS.clone();
     index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
@@ -362,7 +356,7 @@ async fn simple_facet_search_with_sort_by_count() {
     let (response, code) =
         index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

-    assert_eq!(code, 200, "{}", response);
+    assert_eq!(code, 200, "{response}");
     let hits = response["facetHits"].as_array().unwrap();
     assert_eq!(hits.len(), 2);
     assert_eq!(hits[0], json!({ "value": "Action", "count": 3 }));
@ -371,25 +365,25 @@ async fn simple_facet_search_with_sort_by_count() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn add_documents_and_deactivate_facet_search() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
|
||||
let documents = DOCUMENTS.clone();
|
||||
let (response, _code) = index.add_documents(documents, None).await;
|
||||
index.wait_task(response.uid()).await;
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
"facetSearch": false,
|
||||
"filterableAttributes": ["genres"],
|
||||
}))
|
||||
.await;
|
||||
assert_eq!("202", code.as_str(), "{:?}", response);
|
||||
index.wait_task(response.uid()).await;
|
||||
assert_eq!("202", code.as_str(), "{response:?}");
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
let (response, code) =
|
||||
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
|
||||
|
||||
assert_eq!(code, 400, "{}", response);
|
||||
assert_eq!(code, 400, "{response}");
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "The facet search is disabled for this index",
|
||||
@ -402,8 +396,8 @@ async fn add_documents_and_deactivate_facet_search() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn deactivate_facet_search_and_add_documents() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -411,16 +405,16 @@ async fn deactivate_facet_search_and_add_documents() {
|
||||
"filterableAttributes": ["genres"],
|
||||
}))
|
||||
.await;
|
||||
assert_eq!("202", code.as_str(), "{:?}", response);
|
||||
index.wait_task(response.uid()).await;
|
||||
assert_eq!("202", code.as_str(), "{response:?}");
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
let documents = DOCUMENTS.clone();
|
||||
let (response, _code) = index.add_documents(documents, None).await;
|
||||
index.wait_task(response.uid()).await;
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
let (response, code) =
|
||||
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
|
||||
|
||||
assert_eq!(code, 400, "{}", response);
|
||||
assert_eq!(code, 400, "{response}");
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"message": "The facet search is disabled for this index",
|
||||
@ -433,8 +427,8 @@ async fn deactivate_facet_search_and_add_documents() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
@ -442,31 +436,31 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
|
||||
"filterableAttributes": ["genres"],
|
||||
}))
|
||||
.await;
|
||||
assert_eq!("202", code.as_str(), "{:?}", response);
|
||||
index.wait_task(response.uid()).await;
|
||||
assert_eq!("202", code.as_str(), "{response:?}");
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
let documents = DOCUMENTS.clone();
|
||||
let (response, _code) = index.add_documents(documents, None).await;
|
||||
index.wait_task(response.uid()).await;
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
let (response, code) = index
|
||||
.update_settings(json!({
|
||||
"facetSearch": true,
|
||||
}))
|
||||
.await;
|
||||
assert_eq!("202", code.as_str(), "{:?}", response);
|
||||
index.wait_task(response.uid()).await;
|
||||
assert_eq!("202", code.as_str(), "{response:?}");
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
let (response, code) =
|
||||
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
|
||||
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
|
||||
assert_eq!(code, 200, "{response}");
|
||||
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
|
||||
}
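
Every variant above repeats the same lifecycle: a settings change is accepted with a 202, runs as an asynchronous settingsUpdate task, and only takes effect once that task succeeds. A hedged helper sketch built from this suite's own types (the helper itself is assumed, not part of the suite):

// Assumed helper: flip facet search and block until the resulting
// settingsUpdate task has actually succeeded.
async fn set_facet_search(index: &Index<'_>, enabled: bool) {
    let (response, code) = index.update_settings(json!({ "facetSearch": enabled })).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    index.wait_task(response.uid()).await.succeeded();
}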

#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (response, code) = index
        .update_settings(json!({
@@ -474,25 +468,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
            "filterableAttributes": ["genres"],
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    index.wait_task(response.uid()).await.succeeded();
    let documents = DOCUMENTS.clone();
    let (response, _code) = index.add_documents(documents, None).await;
    index.wait_task(response.uid()).await;
    index.wait_task(response.uid()).await.succeeded();

    let (response, code) = index
        .update_settings(json!({
            "facetSearch": serde_json::Value::Null,
        }))
        .await;
    assert_eq!("202", code.as_str(), "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!("202", code.as_str(), "{response:?}");
    index.wait_task(response.uid()).await.succeeded();

    let (response, code) =
        index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

    assert_eq!(code, 200, "{}", response);
    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
    assert_eq!(code, 200, "{response}");
    assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}

#[actix_rt::test]
@@ -618,8 +612,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() {

#[actix_rt::test]
async fn distinct_facet_search_on_movies() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let documents = json!([
        {
@@ -925,26 +919,26 @@ async fn distinct_facet_search_on_movies() {
    ]);
    let (response, code) =
        index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
    assert_eq!(202, code, "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!(202, code, "{response:?}");
    index.wait_task(response.uid()).await.succeeded();
    let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
    assert_eq!(202, code, "{:?}", response);
    index.wait_task(response.uid()).await;
    assert_eq!(202, code, "{response:?}");
    index.wait_task(response.uid()).await.succeeded();

    let (response, _code) = index.add_documents(documents, None).await;
    index.wait_task(response.uid()).await;
    index.wait_task(response.uid()).await.succeeded();

    let (response, code) =
        index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;

    // The non-exhaustive facet count reports 27 documents for the facet query "blob", but only 23 of them have a distinct color.
    assert_eq!(code, 200, "{}", response);
    assert_eq!(code, 200, "{response}");
    snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###);

    let (response, code) =
        index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await;

    // The exhaustive facet count reports 23 documents for the facet query "blob", which is the number of distinct colors.
    assert_eq!(code, 200, "{}", response);
    assert_eq!(code, 200, "{response}");
    snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###);
}
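
To restate what the two calls above demonstrate: the default facet count is a fast, non-exhaustive estimate that ignores the distinct attribute (27 hits), while `exhaustiveFacetCount: true` deduplicates by the distinct attribute `color` and reports 23. A hedged request-shape sketch using the same helpers:

// Sketch: the flag changes how counts are computed, not which facet
// values match the query.
let (response, code) = index
    .facet_search(json!({
        "facetName": "genres",
        "facetQuery": "blob",
        "q": "",
        "exhaustiveFacetCount": true
    }))
    .await;
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###);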

@@ -720,7 +720,7 @@ async fn test_filterable_attributes_priority() {
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
      "message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
      "code": "invalid_search_filter",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -746,7 +746,7 @@ async fn test_filterable_attributes_priority() {
    snapshot!(code, @"400 Bad Request");
    snapshot!(json_string!(response), @r###"
    {
      "message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
      "message": "Index `[uuid]`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
      "code": "invalid_search_filter",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_search_filter"

@@ -76,6 +76,48 @@ static SINGLE_DOCUMENT_VEC: Lazy<Value> = Lazy::new(|| {
    }])
});

static TEST_DISTINCT_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    // for query "Captain Marvel" and vector [1.0, 1.0]
    json!([
        {
            "id": 0,
            "search": "Captain Planet",
            "desc": "#2 for keyword search, #3 for hybrid search",
            "_vectors": {
                "default": [-1.0, 0.0],
            },
            "distinct": 0
        },
        {
            "id": 1,
            "search": "Captain Marvel",
            "desc": "#1 for keyword search, #4 for hybrid search",
            "_vectors": {
                "default": [-1.0, -1.0],
            },
            "distinct": 1
        },
        {
            "id": 2,
            "search": "Some Captain at least",
            "desc": "#3 for keyword search, #1 for hybrid search",
            "_vectors": {
                "default": [1.0, 1.0],
            },
            "distinct": 0
        },
        {
            "id": 3,
            "search": "Irrelevant Capitaine",
            "desc": "#4 for keyword search, #2 for hybrid search",
            "_vectors": {
                "default": [1.0, 0.0],
            },
            "distinct": 1
        },
    ])
});

static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
@@ -493,6 +535,50 @@ async fn query_combination() {
    snapshot!(response["semanticHitCount"], @"0");
}

// see <https://github.com/meilisearch/meilisearch/issues/5526>
#[actix_rt::test]
async fn distinct_is_applied() {
    let server = Server::new().await;
    let index = index_with_documents_user_provided(&server, &TEST_DISTINCT_DOCUMENTS).await;

    let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" })).await;
    assert_eq!(202, code, "{:?}", response);
    index.wait_task(response.uid()).await.succeeded();

    // pure keyword
    let (response, code) = index
        .search_post(
            json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}),
        )
        .await;
    snapshot!(code, @"200 OK");
    snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###);
    snapshot!(response["semanticHitCount"], @"null");
    snapshot!(response["estimatedTotalHits"], @"2");

    // pure semantic
    let (response, code) = index
        .search_post(
            json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}),
        )
        .await;
    snapshot!(code, @"200 OK");
    snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###);
    snapshot!(response["semanticHitCount"], @"2");
    snapshot!(response["estimatedTotalHits"], @"2");

    // hybrid
    let (response, code) = index
        .search_post(
            json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}),
        )
        .await;
    snapshot!(code, @"200 OK");
    snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###);
    snapshot!(response["semanticHitCount"], @"1");
    snapshot!(response["estimatedTotalHits"], @"2");
}
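
The three calls above sweep `semanticRatio` across its range: 0.0 is pure keyword search (semanticHitCount is null), 1.0 is pure vector search, and 0.5 merges the two ranked lists, with the distinct attribute deduplicating the result either way. A hedged sketch of an intermediate ratio, assuming the same `default` embedder and fixture:

// Sketch only: any intermediate ratio should still yield two distinct documents.
let (response, code) = index
    .search_post(
        json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.25, "embedder": "default"}}),
    )
    .await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["estimatedTotalHits"], 2);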

#[actix_rt::test]
async fn retrieve_vectors() {
    let server = Server::new().await;

@@ -89,9 +89,9 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    index
        .update_settings(
@@ -196,9 +196,9 @@ async fn simple_search() {

#[actix_rt::test]
async fn force_locales() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -211,10 +211,10 @@ async fn force_locales() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -274,9 +274,9 @@ async fn force_locales() {

#[actix_rt::test]
async fn force_locales_with_pattern() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -289,10 +289,10 @@ async fn force_locales_with_pattern() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -352,9 +352,9 @@ async fn force_locales_with_pattern() {

#[actix_rt::test]
async fn force_locales_with_pattern_nested() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = NESTED_DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
@@ -365,10 +365,10 @@ async fn force_locales_with_pattern_nested() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -423,9 +423,9 @@ async fn force_locales_with_pattern_nested() {
}
#[actix_rt::test]
async fn force_different_locales_with_pattern() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -440,10 +440,10 @@ async fn force_different_locales_with_pattern() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -499,9 +499,9 @@ async fn force_different_locales_with_pattern() {

#[actix_rt::test]
async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -518,10 +518,10 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -577,9 +577,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {

#[actix_rt::test]
async fn auto_infer_locales_at_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(
@@ -592,10 +592,10 @@ async fn auto_infer_locales_at_search() {
            }),
        )
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -676,9 +676,9 @@ async fn auto_infer_locales_at_search() {

#[actix_rt::test]
async fn force_different_locales_with_pattern_nested() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = NESTED_DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
@@ -691,10 +691,10 @@ async fn force_different_locales_with_pattern_nested() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -774,9 +774,9 @@ async fn force_different_locales_with_pattern_nested() {

#[actix_rt::test]
async fn settings_change() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = NESTED_DOCUMENTS.clone();
    let (task, _status_code) = index.add_documents(documents, None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -789,10 +789,10 @@ async fn settings_change() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 1,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -852,10 +852,10 @@ async fn settings_change() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 2,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -906,9 +906,9 @@ async fn settings_change() {

#[actix_rt::test]
async fn invalid_locales() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    index
        .update_settings(
@@ -945,9 +945,9 @@ async fn invalid_locales() {

#[actix_rt::test]
async fn invalid_localized_attributes_rules() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let (response, _) = index
        .update_settings(json!({
            "localizedAttributes": [
@@ -1015,19 +1015,19 @@ async fn invalid_localized_attributes_rules() {

#[actix_rt::test]
async fn simple_facet_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
            "filterableAttributes": ["name_en", "name_ja", "name_zh"],
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -1073,9 +1073,9 @@ async fn simple_facet_search() {

#[actix_rt::test]
async fn facet_search_with_localized_attributes() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = DOCUMENTS.clone();
    let (response, _) = index
        .update_settings(json!({
@@ -1086,10 +1086,10 @@ async fn facet_search_with_localized_attributes() {
            ]
        }))
        .await;
    snapshot!(response, @r###"
    snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
    {
      "taskUid": 0,
      "indexUid": "test",
      "taskUid": "[task_uid]",
      "indexUid": "[uuid]",
      "status": "enqueued",
      "type": "settingsUpdate",
      "enqueuedAt": "[date]"
@@ -1146,9 +1146,9 @@ async fn facet_search_with_localized_attributes() {

#[actix_rt::test]
async fn swedish_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = json!([
        {"id": "tra1-1", "product": "trä"},
        {"id": "tra2-1", "product": "traktor"},
@@ -1269,9 +1269,9 @@ async fn swedish_search() {

#[actix_rt::test]
async fn german_search() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = server.unique_index();

    let index = server.index("test");
    let documents = json!([
        {"id": 1, "product": "Interkulturalität"},
        {"id": 2, "product": "Wissensorganisation"},

@@ -2,11 +2,11 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;

use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;

async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
    let index = server.index("test");
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
    let index = server.unique_index();

    let (task, _status_code) = index.add_documents(documents.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@@ -75,8 +75,8 @@ async fn simple_search() {

#[actix_rt::test]
async fn search_with_typo() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@@ -102,8 +102,8 @@ async fn search_with_typo() {

#[actix_rt::test]
async fn search_with_unknown_word() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {

File diff suppressed because it is too large

@@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert!(response.get("estimatedTotalHits").is_some());
            assert!(response.get("limit").is_some());
            assert!(response.get("offset").is_some());
@@ -25,7 +25,7 @@ async fn simple_search() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"page": 1}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 5);
            assert!(response.get("totalHits").is_some());
            assert_eq!(response["page"], 1);
@@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"page": 0}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            assert!(response.get("totalHits").is_some());
            assert_eq!(response["page"], 0);
@@ -58,7 +58,7 @@ async fn hits_per_page_1() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"hitsPerPage": 1}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 1);
            assert_eq!(response["totalHits"], 5);
            assert_eq!(response["page"], 1);
@@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() {
    let index = shared_index_with_documents().await;
    index
        .search(json!({"hitsPerPage": 0}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 200, "{response}");
            assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            assert_eq!(response["totalHits"], 5);
            assert_eq!(response["page"], 1);
@@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() {
    for page in 0..=4 {
        index
            .search(json!({"page": page, "hitsPerPage": 1}), |response, code| {
                assert_eq!(code, 200, "{}", response);
                assert_eq!(code, 200, "{response}");
                assert_eq!(response["totalHits"], 4);
                assert_eq!(response["totalPages"], 4);
            })
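
The loop above pins down the pagination arithmetic: with 4 documents and hitsPerPage 1, totalPages is the ceiling of totalHits divided by hitsPerPage for every requested page. A hedged, self-contained sketch of that relation (the hitsPerPage == 0 case shown earlier is handled by the server itself and is deliberately left out):

// Plain-Rust sketch of the relation these assertions rely on.
fn total_pages(total_hits: u64, hits_per_page: u64) -> u64 {
    // assumption: hits_per_page > 0; the zero case short-circuits server-side
    total_hits.div_ceil(hits_per_page)
}

#[test]
fn total_pages_matches_the_assertions_above() {
    assert_eq!(total_pages(4, 1), 4);
    assert_eq!(total_pages(5, 2), 3);
}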

@@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;

use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;

async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
    let index = server.index("test");
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
    let index = server.unique_index();

    let (task, _code) = index.add_documents(documents.clone(), None).await;
    index.wait_task(task.uid()).await.succeeded();
@@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {

#[actix_rt::test]
async fn simple_search_on_title() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
@@ -51,8 +51,8 @@ async fn simple_search_on_title() {

#[actix_rt::test]
async fn search_no_searchable_attribute_set() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(
@@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() {

#[actix_rt::test]
async fn search_on_all_attributes() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    index
        .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@@ -106,8 +106,8 @@ async fn search_on_all_attributes() {

#[actix_rt::test]
async fn search_on_all_attributes_restricted_set() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
    index.wait_task(task.uid()).await.succeeded();

@@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() {

#[actix_rt::test]
async fn simple_prefix_search_on_title() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // simple search should return 2 documents (ids: 2 and 3).
    index
@@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() {

#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search matching strategy all should only return 1 document (ids: 2).
    index
        .search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| {
@@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() {

#[actix_rt::test]
async fn simple_search_on_no_field() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    // simple search on no field shouldn't return any document.
    index
        .search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| {
@@ -161,8 +161,8 @@ async fn simple_search_on_no_field() {

#[actix_rt::test]
async fn word_ranking_rule_order() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before document 2.
    index
@@ -189,8 +189,8 @@ async fn word_ranking_rule_order() {

#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
    let server = Server::new().await;
    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let server = Server::new_shared();
    let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
    let (task, _status_code) = index
        .update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
        .await;
@@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() {

#[actix_rt::test]
async fn typo_ranking_rule_order() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Capitain Marivel",
@@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() {

#[actix_rt::test]
async fn attributes_ranking_rule_order() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Captain Marvel",
@@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() {

#[actix_rt::test]
async fn exactness_ranking_rule_order() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Captain Marvel",
@@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() {

#[actix_rt::test]
async fn search_on_exact_field() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let index = index_with_documents(
        &server,
        server,
        &json!([
            {
                "title": "Captain Marvel",
@@ -359,7 +359,7 @@ async fn search_on_exact_field() {

    let (response, code) =
        index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
    assert_eq!(202, code, "{:?}", response);
    assert_eq!(202, code, "{response:?}");
    index.wait_task(response.uid()).await.succeeded();
    // Searching on an exact attribute should only return the document matching without typo.
    index
@@ -372,7 +372,7 @@ async fn search_on_exact_field() {

#[actix_rt::test]
async fn phrase_search_on_title() {
    let server = Server::new().await;
    let server = Server::new_shared();
    let documents = json!([
        { "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" },
        { "id": 5, "desc": "Document Review", "title": "Document Review Attorney" },
@@ -383,7 +383,7 @@ async fn phrase_search_on_title() {
        { "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" },
        { "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" }
    ]);
    let index = index_with_documents(&server, &documents).await;
    let index = index_with_documents(server, &documents).await;

    index
        .search(
@@ -416,3 +416,381 @@ async fn phrase_search_on_title() {
        )
        .await;
}

static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "details": {
                "title": "Shazam!",
                "desc": "a Captain Marvel ersatz",
                "weaknesses": ["magic", "requires transformation"],
                "outfit": {
                    "has_cape": true,
                    "colors": {
                        "primary": "red",
                        "secondary": "gold"
                    }
                }
            },
            "id": "1",
        },
        {
            "details": {
                "title": "Captain Planet",
                "desc": "He's not part of the Marvel Cinematic Universe",
                "blue_skin": true,
                "outfit": {
                    "has_cape": false
                }
            },
            "id": "2",
        },
        {
            "details": {
                "title": "Captain Marvel",
                "desc": "a Shazam ersatz",
                "weaknesses": ["magic", "power instability"],
                "outfit": {
                    "has_cape": false
                }
            },
            "id": "3",
        }])
});

#[actix_rt::test]
async fn nested_search_on_title_with_prefix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // The wildcard should match the 'details' attribute prefix.
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_search_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // The wildcard should match any attribute nested inside 'details.'.
    // It's worth noting the difference between 'details.*' and '*.title'.
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;

    // Should return 1 document (ids: 1)
    index
        .search(
            json!({"q": "gold", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "1"
                  }
                ]"###);
            },
        )
        .await;

    // Should return 2 documents (ids: 1 and 2)
    index
        .search(
            json!({"q": "true", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}
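
To summarize the two wildcard shapes these tests contrast: `*.title` expands only to paths whose last segment is `title` (here, `details.title`), while `details.*` expands to every searchable field nested under `details`, which is why the "gold" query above can reach `details.outfit.colors.secondary`. A hedged sketch against the same fixture:

// Sketch: "gold" lives deep under `details`, so the suffix wildcard finds
// it while the `*.title` form cannot.
index
    .search(
        json!({"q": "gold", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
        |response, code| {
            snapshot!(code, @"200 OK");
            // hedged expectation: no title contains "gold"
            snapshot!(response["hits"].as_array().unwrap().len(), @"0");
        },
    )
    .await;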

#[actix_rt::test]
async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
    let (task, _status_code) =
        index.update_settings_searchable_attributes(json!(["details.title"])).await;
    index.wait_task(task.uid()).await.succeeded();

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(response["hits"].as_array().unwrap().len(), @"0");
            },
        )
        .await;

    let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
    index.wait_task(task.uid()).await.succeeded();

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(response["hits"].as_array().unwrap().len(), @"0");
            },
        )
        .await;

    let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
    index.wait_task(task.uid()).await.succeeded();

    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_prefix_search_on_title_with_prefix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3).
    index
        .search(
            json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_prefix_search_on_details_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    index
        .search(
            json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3)
    index
        .search(
            json!({"q": "mag", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "1"
                  },
                  {
                    "id": "3"
                  }
                ]"###);
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_search_on_title_matching_strategy_all() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Nested search matching strategy all should only return 1 document (ids: 3)
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all", "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  }
                ]"###);
            },
        )
        .await;
}
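
Stated once for the strategies exercised above: the default "last" strategy may drop trailing query words to widen the candidate set, so "Captain Marvel" also matches "Captain Planet", whereas "all" requires every word and keeps only document 3. A hedged sketch against the same fixture:

// Sketch: same query and attributes, but the default "last" strategy
// brings back both captains instead of only document 3.
index
    .search(
        json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "last", "attributesToRetrieve": ["id"]}),
        |response, code| {
            snapshot!(code, @"200 OK");
            // hedged expectation, mirroring nested_search_on_title_with_prefix_wildcard
            snapshot!(response["hits"].as_array().unwrap().len(), @"2");
        },
    )
    .await;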

#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before documents 1 and 2
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.desc", "*.title"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]
                "###
                );
            },
        )
        .await;
}

#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() {
    let server = Server::new_shared();
    let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;

    // Document 3 should appear before documents 1 and 2
    index
        .search(
            json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
            |response, code| {
                snapshot!(code, @"200 OK");
                snapshot!(json_string!(response["hits"]),
                    @r###"
                [
                  {
                    "id": "3"
                  },
                  {
                    "id": "1"
                  },
                  {
                    "id": "2"
                  }
                ]
                "###
                );
            },
        )
        .await;
}

@@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs
{
  "uid": "[uid]",
  "batchUid": "[batch_uid]",
  "indexUid": "tamo",
  "indexUid": "[uuid]",
  "status": "succeeded",
  "type": "settingsUpdate",
  "canceledBy": null,

@@ -3,8 +3,8 @@ use crate::json;

#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
    index.wait_task(task1.uid()).await.succeeded();
@@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() {

#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
    let server = Server::new().await;
    let index = server.index("test");
    let server = Server::new_shared();
    let index = server.unique_index();

    let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
    index.wait_task(update_task1.uid()).await.succeeded();

@@ -11,59 +11,62 @@ macro_rules! test_setting_routes {

        #[actix_rt::test]
        async fn get_unexisting_index() {
            let server = Server::new().await;
            let url = format!("/indexes/test/settings/{}",
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (_response, code) = server.service.get(url).await;
            assert_eq!(code, 404);
        }

        #[actix_rt::test]
        async fn update_unexisting_index() {
            let server = Server::new().await;
            let url = format!("/indexes/test/settings/{}",
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
            assert_eq!(code, 202, "{}", response);
            server.index("").wait_task(0).await;
            let (response, code) = server.index("test").get().await;
            assert_eq!(code, 200, "{}", response);
        }

        #[actix_rt::test]
        async fn delete_unexisting_index() {
            let server = Server::new().await;
            let url = format!("/indexes/test/settings/{}",
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (_, code) = server.service.delete(url).await;
            assert_eq!(code, 202);
            let response = server.index("").wait_task(0).await;
            assert_eq!(response["status"], "failed");
        }

        #[actix_rt::test]
        async fn get_default() {
            let server = Server::new().await;
            let index = server.index("test");
            let (response, code) = index.create(None).await;
            assert_eq!(code, 202, "{}", response);
            index.wait_task(0).await;
            let url = format!("/indexes/test/settings/{}",
            let server = Server::new_shared();
            let index_name = uuid::Uuid::new_v4().to_string();
            let url = format!("/indexes/{index_name}/settings/{}",
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (response, code) = server.service.get(url).await;
            assert_eq!(code, 200, "{}", response);
            assert_eq!(code, 404, "{response}");
        }

        #[actix_rt::test]
        async fn update_unexisting_index() {
            let server = Server::new_shared();
            let index_name = uuid::Uuid::new_v4().to_string();
            let url = format!("/indexes/{index_name}/settings/{}",
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
            assert_eq!(code, 202, "{response}");
            let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
            assert_eq!(code, 404, "{response}");
        }

        #[actix_rt::test]
        async fn delete_unexisting_index() {
            let server = Server::new_shared();
            let index_name = uuid::Uuid::new_v4().to_string();
            let url = format!("/indexes/{index_name}/settings/{}",
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (response, code) = server.service.delete(url).await;
            assert_eq!(code, 202, "{response}");
            let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
            assert_eq!(code, 404, "{response}");
        }

        #[actix_rt::test]
        async fn get_default() {
            let server = Server::new_shared();
            let index = server.unique_index();
            let (response, code) = index.create(None).await;
            assert_eq!(code, 202, "{response}");
            index.wait_task(response.uid()).await.succeeded();
            let url = format!("/indexes/{}/settings/{}",
                index.uid,
                stringify!($setting)
                .chars()
                .map(|c| if c == '_' { '-' } else { c })
                .collect::<String>());
            let (response, code) = server.service.get(url).await;
            assert_eq!(code, 200, "{response}");
            let expected = crate::json!($default_value);
            assert_eq!(expected, response);
        }
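
The `stringify!($setting)` chain repeated in every generated test is a snake-case to kebab-case conversion for the route segment. A standalone sketch of that mapping (the function name is illustrative, not part of the macro):

// e.g. distinct_attribute -> "distinct-attribute", so the generated URL
// becomes "/indexes/{uid}/settings/distinct-attribute".
fn setting_route_segment(setting_ident: &str) -> String {
    setting_ident.chars().map(|c| if c == '_' { '-' } else { c }).collect()
}

#[test]
fn kebab_case_route_segment() {
    assert_eq!(setting_route_segment("distinct_attribute"), "distinct-attribute");
}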
|
||||
@ -185,15 +188,16 @@ test_setting_routes!(
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_settings_unexisting_index() {
|
||||
let server = Server::new().await;
|
||||
let (response, code) = server.index("test").settings().await;
|
||||
assert_eq!(code, 404, "{}", response)
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
let (response, code) = index.settings().await;
|
||||
assert_eq!(code, 404, "{response}")
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn get_settings() {
|
||||
let server = Server::new().await;
|
||||
let index = server.index("test");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
let (response, _code) = index.create(None).await;
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
let (response, code) = index.settings().await;
|
||||
@ -237,9 +241,8 @@ async fn get_settings() {
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn secrets_are_hidden_in_settings() {
|
||||
let server = Server::new().await;
|
||||
|
||||
let index = server.index("test");
|
||||
let server = Server::new_shared();
|
||||
let index = server.unique_index();
|
||||
let (response, _code) = index.create(None).await;
|
||||
index.wait_task(response.uid()).await.succeeded();
|
||||
|
||||
@ -259,11 +262,11 @@ async fn secrets_are_hidden_in_settings() {
|
||||
.await;
|
||||
meili_snap::snapshot!(code, @"202 Accepted");
|
||||
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
|
||||
@r###"
|
||||
{
|
||||
"taskUid": 1,
|
||||
"indexUid": "test",
|
||||
"taskUid": "[task_uid]",
|
||||
"indexUid": "[uuid]",
|
||||
"status": "enqueued",
|
||||
"type": "settingsUpdate",
|
||||
"enqueuedAt": "[date]"
|
||||
@@ -272,7 +275,7 @@ async fn secrets_are_hidden_in_settings() {

let settings_update_uid = response.uid();

index.wait_task(settings_update_uid).await;
index.wait_task(settings_update_uid).await.succeeded();

let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@@ -360,16 +363,16 @@ async fn secrets_are_hidden_in_settings() {

#[actix_rt::test]
async fn error_update_settings_unknown_field() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (_response, code) = index.update_settings(json!({"foo": 12})).await;
assert_eq!(code, 400);
}

#[actix_rt::test]
async fn test_partial_update() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
@@ -388,20 +391,18 @@ async fn test_partial_update() {

#[actix_rt::test]
async fn error_delete_settings_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);

let response = index.wait_task(task.uid()).await;

assert_eq!(response["status"], "failed");
index.wait_task(task.uid()).await.failed();
}

#[actix_rt::test]
async fn reset_all_settings() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let documents = json!([
{
@@ -413,7 +414,6 @@ async fn reset_all_settings() {

let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 0);
index.wait_task(response.uid()).await.succeeded();

let (update_task,_status_code) = index
@@ -446,17 +446,15 @@ async fn reset_all_settings() {

#[actix_rt::test]
async fn update_setting_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
}

#[actix_rt::test]
@@ -477,8 +475,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() {

#[actix_rt::test]
async fn error_set_invalid_ranking_rules() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index.create(None).await;

let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
@@ -495,8 +493,8 @@ async fn error_set_invalid_ranking_rules() {

#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
@@ -516,8 +514,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {

#[actix_rt::test]
async fn granular_filterable_attributes() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index.create(None).await;

let (response, code) =
@@ -535,7 +533,7 @@ async fn granular_filterable_attributes() {
index.wait_task(response.uid()).await.succeeded();

let (response, code) = index.settings().await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response["filterableAttributes"]), @r###"
[
{

@@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {

#[actix_rt::test]
async fn attribute_scale_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -38,7 +38,7 @@ async fn attribute_scale_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();

// the expected order is [1, 3, 2] instead of [3, 1, 2]
@@ -99,8 +99,8 @@ async fn attribute_scale_search() {

#[actix_rt::test]
async fn attribute_scale_phrase_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -167,8 +167,8 @@ async fn attribute_scale_phrase_search() {

#[actix_rt::test]
async fn word_scale_set_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -282,8 +282,8 @@ async fn word_scale_set_and_reset() {

#[actix_rt::test]
async fn attribute_scale_default_ranking_rules() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() {
"proximityPrecision": "byAttribute"
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();

// the expected order is [3, 1, 2]

@@ -5,8 +5,8 @@ use crate::json;

#[actix_rt::test]
async fn set_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (task, _code) = index
.update_settings(json!({
@@ -70,8 +70,8 @@ async fn set_and_search() {
},
]);

let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
@@ -224,8 +224,8 @@ async fn advanced_synergies() {
},
]);

let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();

@@ -6,11 +6,11 @@ use crate::json;

#[actix_rt::test]
async fn similar_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@@ -26,12 +26,12 @@ async fn similar_unexisting_index() {

#[actix_rt::test]
async fn similar_unexisting_parameter() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

index
.similar(json!({"id": 287947, "marin": "hello"}), |response, code| {
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
assert_eq!(response["code"], "bad_request");
})
.await;
@@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() {

#[actix_rt::test]
async fn similar_bad_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -53,7 +53,7 @@ async fn similar_bad_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request");
@@ -69,8 +69,8 @@ async fn similar_bad_id() {

#[actix_rt::test]
async fn similar_bad_ranking_score_threshold() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() {

#[actix_rt::test]
async fn similar_invalid_ranking_score_threshold() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await;
snapshot!(code, @"400 Bad Request");
@@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() {

#[actix_rt::test]
async fn similar_invalid_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -143,7 +143,7 @@ async fn similar_invalid_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await;
@@ -160,8 +160,8 @@ async fn similar_invalid_id() {

#[actix_rt::test]
async fn similar_not_found_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -174,7 +174,7 @@ async fn similar_not_found_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await;
@@ -191,8 +191,8 @@ async fn similar_not_found_id() {

#[actix_rt::test]
async fn similar_bad_offset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -205,7 +205,7 @@ async fn similar_bad_offset() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await;
@@ -233,8 +233,8 @@ async fn similar_bad_offset() {

#[actix_rt::test]
async fn similar_bad_limit() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -247,7 +247,7 @@ async fn similar_bad_limit() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let (response, code) =
index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await;
@@ -277,8 +277,8 @@ async fn similar_bad_limit() {
async fn similar_bad_filter() {
// Since a filter is deserialized as a json Value it will never fail to deserialize.
// Thus the error message is not generated by deserr but written by us.
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -291,7 +291,7 @@ async fn similar_bad_filter() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

snapshot!(code, @"202 Accepted");

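The comment in similar_bad_filter above captures a deliberate design: the filter field is typed as a raw JSON value, so deserr can never reject it and the "bad filter" error has to be produced by hand. A sketch of that pattern; the function and error text here are illustrative, not Meilisearch's actual types:

use serde_json::Value;

// Because the field deserializes as a raw `Value`, type errors are impossible
// at the deserr layer; this hand-written check is the only place a bad filter
// can be reported.
fn validate_filter(filter: &Value) -> Result<(), String> {
    match filter {
        // Accept the shapes the engine understands: expression strings and
        // arrays of expressions.
        Value::String(_) | Value::Array(_) => Ok(()),
        // Anything else deserialized fine but is still a user error, so the
        // message is written manually instead of generated by deserr.
        other => Err(format!(
            "Invalid syntax for the filter parameter: expected a string or an array, found `{other}`."
        )),
    }
}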
@@ -316,8 +316,8 @@ async fn similar_bad_filter() {

#[actix_rt::test]
async fn filter_invalid_syntax_object() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() {

#[actix_rt::test]
async fn filter_invalid_syntax_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() {

#[actix_rt::test]
async fn filter_invalid_syntax_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() {

#[actix_rt::test]
async fn filter_invalid_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() {

#[actix_rt::test]
async fn filter_invalid_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() {

#[actix_rt::test]
async fn filter_reserved_geo_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() {

#[actix_rt::test]
async fn filter_reserved_geo_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() {

#[actix_rt::test]
async fn filter_reserved_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() {

#[actix_rt::test]
async fn filter_reserved_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() {

#[actix_rt::test]
async fn filter_reserved_geo_point_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() {

#[actix_rt::test]
async fn filter_reserved_geo_point_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() {

#[actix_rt::test]
async fn similar_bad_retrieve_vectors() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) =
index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await;
@@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() {

#[actix_rt::test]
async fn similar_bad_embedder() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -820,7 +820,7 @@ async fn similar_bad_embedder() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();

let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;

@@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents;

#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() {

#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() {

#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() {

#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();

let (response, code) = index
.update_settings(json!({
@@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() {
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
snapshot!(ret, @r#"
snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "doggo",
"indexUid": "[uuid]",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
@@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() {
}
},
"error": {
"message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"

@@ -111,6 +111,7 @@ utoipa = { version = "5.3.1", features = [
"openapi_extensions",
] }
lru = "0.13.0"
blake2 = "0.10.6"

[dev-dependencies]
mimalloc = { version = "0.1.43", default-features = false }

@@ -50,7 +50,7 @@ impl AttributePatterns {
///
/// * `pattern` - The pattern to match against.
/// * `str` - The string to match against the pattern.
fn match_pattern(pattern: &str, str: &str) -> PatternMatch {
pub fn match_pattern(pattern: &str, str: &str) -> PatternMatch {
// If the pattern is a wildcard, return Match
if pattern == "*" {
return PatternMatch::Match;
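match_pattern is promoted from a private helper to pub here so the query pipeline can evaluate attribute patterns too. A hypothetical caller, sketched from the behavior visible in these hunks (the import matches the one added later in search/new/mod.rs; the glob-style semantics of non-"*" patterns are an assumption):

use crate::attribute_patterns::{match_pattern, PatternMatch};

fn field_matches(pattern: &str, field_name: &str) -> bool {
    // "*" matches everything; the later search-context hunk uses this same
    // call to expand wildcard patterns from `attributesToSearchOn`.
    match_pattern(pattern, field_name) == PatternMatch::Match
}

// field_matches("*", "title") holds, and a pattern such as "author.*" would
// be expected to match "author.name".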
@@ -1,11 +1,13 @@
use std::cmp::Ordering;

use heed::RoTxn;
use itertools::Itertools;
use roaring::RoaringBitmap;

use crate::score_details::{ScoreDetails, ScoreValue, ScoringStrategy};
use crate::search::new::{distinct_fid, distinct_single_docid};
use crate::search::SemanticSearch;
use crate::{MatchingWords, Result, Search, SearchResult};
use crate::{Index, MatchingWords, Result, Search, SearchResult};

struct ScoreWithRatioResult {
matching_words: MatchingWords,
@@ -91,7 +93,10 @@ impl ScoreWithRatioResult {
keyword_results: Self,
from: usize,
length: usize,
) -> (SearchResult, u32) {
distinct: Option<&str>,
index: &Index,
rtxn: &RoTxn<'_>,
) -> Result<(SearchResult, u32)> {
#[derive(Clone, Copy)]
enum ResultSource {
Semantic,
@@ -106,8 +111,9 @@ impl ScoreWithRatioResult {
vector_results.document_scores.len() + keyword_results.document_scores.len(),
);

let mut documents_seen = RoaringBitmap::new();
for ((docid, (main_score, _sub_score)), source) in vector_results
let distinct_fid = distinct_fid(distinct, index, rtxn)?;
let mut excluded_documents = RoaringBitmap::new();
for res in vector_results
.document_scores
.into_iter()
.zip(std::iter::repeat(ResultSource::Semantic))
@@ -121,13 +127,33 @@ impl ScoreWithRatioResult {
compare_scores(left, right).is_ge()
},
)
// remove documents we already saw
.filter(|((docid, _), _)| documents_seen.insert(*docid))
// remove documents we already saw and apply distinct rule
.filter_map(|item @ ((docid, _), _)| {
if !excluded_documents.insert(docid) {
// the document was already added, or is indistinct from an already-added document.
return None;
}

if let Some(distinct_fid) = distinct_fid {
if let Err(error) = distinct_single_docid(
index,
rtxn,
distinct_fid,
docid,
&mut excluded_documents,
) {
return Some(Err(error));
}
}

Some(Ok(item))
})
// start skipping **after** the filter
.skip(from)
// take **after** skipping
.take(length)
{
let ((docid, (main_score, _sub_score)), source) = res?;
if let ResultSource::Semantic = source {
semantic_hit_count += 1;
}
@@ -136,10 +162,24 @@ impl ScoreWithRatioResult {
document_scores.push(main_score);
}

(
// compute the set of candidates from both sets
let candidates = vector_results.candidates | keyword_results.candidates;
let must_remove_redundant_candidates = distinct_fid.is_some();
let candidates = if must_remove_redundant_candidates {
// patch-up the candidates to remove the indistinct documents, then add back the actual hits
let mut candidates = candidates - excluded_documents;
for docid in &documents_ids {
candidates.insert(*docid);
}
candidates
} else {
candidates
};

Ok((
SearchResult {
matching_words: keyword_results.matching_words,
candidates: vector_results.candidates | keyword_results.candidates,
candidates,
documents_ids,
document_scores,
degraded: vector_results.degraded | keyword_results.degraded,
@@ -147,7 +187,7 @@ impl ScoreWithRatioResult {
| keyword_results.used_negative_operator,
},
semantic_hit_count,
)
))
}
}

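The comments in the merge above pin down an ordering that is easy to get wrong: deduplication and the distinct rule must run before skip(from)/take(length), otherwise discarded documents would still consume slots of the requested page. The same ordering, reduced to plain integers as a self-contained sketch:

// Standalone illustration of the ordering the comments above insist on; the
// items are plain u32s rather than milli's scored documents.
fn paginate_distinct(hits: Vec<u32>, from: usize, length: usize) -> Vec<u32> {
    let mut seen = std::collections::HashSet::new();
    hits.into_iter()
        // remove documents we already saw (the dedup/distinct step)
        .filter(|docid| seen.insert(*docid))
        // start skipping **after** the filter
        .skip(from)
        // take **after** skipping
        .take(length)
        .collect()
}

// paginate_distinct(vec![1, 1, 2, 3, 3, 4], 1, 2) == vec![2, 3]: the duplicate
// 1 and 3 never consume a slot of the requested page.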
@@ -226,8 +266,15 @@ impl Search<'_> {
let keyword_results = ScoreWithRatioResult::new(keyword_results, 1.0 - semantic_ratio);
let vector_results = ScoreWithRatioResult::new(vector_results, semantic_ratio);

let (merge_results, semantic_hit_count) =
ScoreWithRatioResult::merge(vector_results, keyword_results, self.offset, self.limit);
let (merge_results, semantic_hit_count) = ScoreWithRatioResult::merge(
vector_results,
keyword_results,
self.offset,
self.limit,
search.distinct.as_deref(),
search.index,
search.rtxn,
)?;
assert!(merge_results.documents_ids.len() <= self.limit);
Ok((merge_results, Some(semantic_hit_count)))
}

@@ -4,7 +4,9 @@ use super::logger::SearchLogger;
use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait};
use super::SearchContext;
use crate::score_details::{ScoreDetails, ScoringStrategy};
use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput};
use crate::search::new::distinct::{
apply_distinct_rule, distinct_fid, distinct_single_docid, DistinctOutput,
};
use crate::{Result, TimeBudget};

pub struct BucketSortOutput {
@@ -35,16 +37,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
logger.ranking_rules(&ranking_rules);
logger.initial_universe(universe);

let distinct_field = match distinct {
Some(distinct) => Some(distinct),
None => ctx.index.distinct_field(ctx.txn)?,
};

let distinct_fid = if let Some(field) = distinct_field {
ctx.index.fields_ids_map(ctx.txn)?.id(field)
} else {
None
};
let distinct_fid = distinct_fid(distinct, ctx.index, ctx.txn)?;

if universe.len() < from as u64 {
return Ok(BucketSortOutput {

@@ -9,7 +9,7 @@ use crate::heed_codec::facet::{
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetCodec,
};
use crate::heed_codec::BytesRefCodec;
use crate::{Index, Result, SearchContext};
use crate::{FieldId, Index, Result, SearchContext};

pub struct DistinctOutput {
pub remaining: RoaringBitmap,
@@ -121,3 +121,18 @@ pub fn facet_string_values<'a>(
fn facet_values_prefix_key(distinct: u16, id: u32) -> [u8; FID_SIZE + DOCID_SIZE] {
concat_arrays::concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes())
}

pub fn distinct_fid(
query_distinct_field: Option<&str>,
index: &Index,
rtxn: &RoTxn<'_>,
) -> Result<Option<FieldId>> {
let distinct_field = match query_distinct_field {
Some(distinct) => Some(distinct),
None => index.distinct_field(rtxn)?,
};

let distinct_fid =
if let Some(field) = distinct_field { index.fields_ids_map(rtxn)?.id(field) } else { None };
Ok(distinct_fid)
}
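bucket_sort's inline lookup above is replaced by this shared distinct_fid helper, so the hybrid-search merge and the bucket sort now resolve the distinct field identically. A fragment showing the intended call shape (the search.* receivers are assumed to be milli's Search state, as at the merge call site above):

// Query-level distinct overrides the index-level setting; `None` means no
// distinct attribute is configured anywhere.
let distinct_fid = distinct_fid(search.distinct.as_deref(), search.index, search.rtxn)?;
if let Some(fid) = distinct_fid {
    // e.g. pass `fid` to distinct_single_docid to exclude indistinct docids
}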
@@ -28,6 +28,7 @@ use std::time::Duration;
use bucket_sort::{bucket_sort, BucketSortOutput};
use charabia::{Language, TokenizerBuilder};
use db_cache::DatabaseCache;
pub use distinct::{distinct_fid, distinct_single_docid};
use exact_attribute::ExactAttribute;
use graph_based_ranking_rule::{Exactness, Fid, Position, Proximity, Typo};
use heed::RoTxn;
@@ -47,11 +48,11 @@ use sort::Sort;

use self::distinct::facet_string_values;
use self::geo_sort::GeoSort;
pub use self::geo_sort::Parameter as GeoSortParameter;
pub use self::geo_sort::Strategy as GeoSortStrategy;
pub use self::geo_sort::{Parameter as GeoSortParameter, Strategy as GeoSortStrategy};
use self::graph_based_ranking_rule::Words;
use self::interner::Interned;
use self::vector_sort::VectorSort;
use crate::attribute_patterns::{match_pattern, PatternMatch};
use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::index::PrefixSearch;
use crate::localized_attributes_rules::LocalizedFieldIds;
@@ -120,17 +121,37 @@ impl<'ctx> SearchContext<'ctx> {
let searchable_fields_weights = self.index.searchable_fields_and_weights(self.txn)?;
let exact_attributes_ids = self.index.exact_attributes_ids(self.txn)?;

let mut wildcard = false;
let mut universal_wildcard = false;

let mut restricted_fids = RestrictedFids::default();
for field_name in attributes_to_search_on {
if field_name == "*" {
wildcard = true;
universal_wildcard = true;
// we cannot early exit, as we want to return an error in case of unknown fields
continue;
}
let searchable_weight =
searchable_fields_weights.iter().find(|(name, _, _)| name == field_name);

// The field is not searchable but may contain a wildcard pattern
if searchable_weight.is_none() && field_name.contains("*") {
let matching_searchable_weights: Vec<_> = searchable_fields_weights
.iter()
.filter(|(name, _, _)| match_pattern(field_name, name) == PatternMatch::Match)
.collect();

if !matching_searchable_weights.is_empty() {
for (_name, fid, weight) in matching_searchable_weights {
if exact_attributes_ids.contains(fid) {
restricted_fids.exact.push((*fid, *weight));
} else {
restricted_fids.tolerant.push((*fid, *weight));
}
}
continue;
}
}

let (fid, weight) = match searchable_weight {
// The field id exists and the field is searchable
Some((_name, fid, weight)) => (*fid, *weight),
@@ -160,7 +181,7 @@ impl<'ctx> SearchContext<'ctx> {
};
}

if wildcard {
if universal_wildcard {
self.restricted_fids = None;
} else {
self.restricted_fids = Some(restricted_fids);
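The hunk above generalizes attributesToSearchOn: a name that is not itself a searchable field but contains "*" is treated as a pattern and expanded against every searchable field, keeping each match's field id and weight. The same filtering step, with invented field names and weights standing in for the index's real ones:

// Plain-data sketch of the expansion step above; the (name, fid, weight)
// triples are made up for illustration.
let searchable_fields_weights =
    [("title", 0u16, 1u16), ("author.name", 1, 2), ("author.bio", 2, 2)];
let field_name = "author.*";

let matching: Vec<_> = searchable_fields_weights
    .iter()
    .filter(|(name, _, _)| match_pattern(field_name, name) == PatternMatch::Match)
    .collect();
// `matching` keeps author.name and author.bio; their ids then land in the
// exact or tolerant restricted-fid lists exactly as the hunk shows.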
@@ -92,12 +92,12 @@ fn find_one_typo_derivations(
let mut stream = fst.search_with_state(Intersection(starts, &dfa)).into_stream();

while let Some((derived_word, state)) = stream.next() {
let derived_word = std::str::from_utf8(derived_word)?;
let derived_word = ctx.word_interner.insert(derived_word.to_owned());
let d = dfa.distance(state.1);
match d.to_u8() {
0 => (),
1 => {
let derived_word = std::str::from_utf8(derived_word)?;
let derived_word = ctx.word_interner.insert(derived_word.to_owned());
let cf = visit(derived_word)?;
if cf.is_break() {
break;

@@ -76,6 +76,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -72,7 +72,7 @@ fn test_2gram_simple() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -103,7 +103,7 @@ fn test_3gram_simple() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -153,7 +153,7 @@ fn test_no_disable_ngrams() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -179,7 +179,7 @@ fn test_2gram_prefix() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -208,7 +208,7 @@ fn test_3gram_prefix() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -260,7 +260,7 @@ fn test_disable_split_words() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -151,7 +151,7 @@ fn test_no_typo() {
let index = create_index();
index
.update_settings(|s| {
s.set_autorize_typos(false);
s.set_authorize_typos(false);
})
.unwrap();

@@ -19,10 +19,7 @@ use crate::update::{
};
use crate::vector::settings::{EmbedderSource, EmbeddingSettings};
use crate::vector::EmbeddingConfigs;
use crate::{
db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult,
ThreadPoolNoAbortBuilder,
};
use crate::{db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult};

pub(crate) struct TempIndex {
pub inner: Index,
@@ -62,15 +59,8 @@ impl TempIndex {
wtxn: &mut RwTxn<'t>,
documents: Mmap,
) -> Result<(), crate::error::Error> {
let local_pool;
let indexer_config = &self.indexer_config;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
&local_pool
}
};
let pool = &indexer_config.thread_pool;

let rtxn = self.inner.read_txn()?;
let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?;
@@ -94,6 +84,7 @@ impl TempIndex {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)?;

if let Some(error) = operation_stats.into_iter().find_map(|stat| stat.error) {
@@ -153,15 +144,8 @@ impl TempIndex {
wtxn: &mut RwTxn<'t>,
external_document_ids: Vec<String>,
) -> Result<(), crate::error::Error> {
let local_pool;
let indexer_config = &self.indexer_config;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
&local_pool
}
};
let pool = &indexer_config.thread_pool;

let rtxn = self.inner.read_txn()?;
let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?;
@@ -183,6 +167,7 @@ impl TempIndex {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)?;

if let Some(error) = operation_stats.into_iter().find_map(|stat| stat.error) {
@@ -231,15 +216,8 @@ fn aborting_indexation() {
let mut wtxn = index.inner.write_txn().unwrap();
let should_abort = AtomicBool::new(false);

let local_pool;
let indexer_config = &index.indexer_config;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
&local_pool
}
};
let pool = &indexer_config.thread_pool;

let rtxn = index.inner.read_txn().unwrap();
let db_fields_ids_map = index.inner.fields_ids_map(&rtxn).unwrap();
@@ -264,6 +242,7 @@ fn aborting_indexation() {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -54,6 +54,10 @@ impl ThreadPoolNoAbortBuilder {
ThreadPoolNoAbortBuilder::default()
}

pub fn new_for_indexing() -> ThreadPoolNoAbortBuilder {
ThreadPoolNoAbortBuilder::default().thread_name(|index| format!("indexing-thread:{index}"))
}

pub fn thread_name<F>(mut self, closure: F) -> Self
where
F: FnMut(usize) -> String + 'static,
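new_for_indexing bakes the indexing thread-name scheme into the builder, so every caller gets threads named indexing-thread:<i> without repeating the closure. Building a pool with it, using only builder methods that appear in these hunks (new_for_indexing, num_threads, build):

// Sketch of constructing an indexing pool with the new helper; the thread
// count is illustrative, omit `num_threads` to let rayon's default apply.
let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
    .num_threads(4)
    .build()
    .expect("failed to build indexing thread pool");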
@@ -33,7 +33,6 @@ use crate::documents::{obkv_to_object, DocumentsBatchReader};
use crate::error::{Error, InternalError};
use crate::index::{PrefixSearch, PrefixSettings};
use crate::progress::Progress;
use crate::thread_pool_no_abort::ThreadPoolNoAbortBuilder;
pub use crate::update::index_documents::helpers::CursorClonableMmap;
use crate::update::{
IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst,
@@ -228,24 +227,7 @@ where
let possible_embedding_mistakes =
crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution);

let backup_pool;
let pool = match self.indexer_config.thread_pool {
Some(ref pool) => pool,
None => {
// We initialize a backup pool with the default
// settings if none have already been set.
#[allow(unused_mut)]
let mut pool_builder = ThreadPoolNoAbortBuilder::new();

#[cfg(test)]
{
pool_builder = pool_builder.num_threads(1);
}

backup_pool = pool_builder.build()?;
&backup_pool
}
};
let pool = &self.indexer_config.thread_pool;

// create LMDB writer channel
let (lmdb_writer_sx, lmdb_writer_rx): (
@@ -1973,6 +1955,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2025,6 +2008,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2112,6 +2096,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2300,6 +2285,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2362,6 +2348,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2415,6 +2402,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2467,6 +2455,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2521,6 +2510,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2580,6 +2570,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2632,6 +2623,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2684,6 +2676,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2882,6 +2875,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2941,6 +2935,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -2997,6 +2992,7 @@ mod tests {
&mut new_fields_ids_map,
&|| false,
Progress::default(),
&[],
)
.unwrap();

@@ -1,7 +1,7 @@
use grenad::CompressionType;

use super::GrenadParameters;
use crate::thread_pool_no_abort::ThreadPoolNoAbort;
use crate::{thread_pool_no_abort::ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};

#[derive(Debug)]
pub struct IndexerConfig {
@@ -9,9 +9,10 @@ pub struct IndexerConfig {
pub max_nb_chunks: Option<usize>,
pub documents_chunk_size: Option<usize>,
pub max_memory: Option<usize>,
pub max_threads: Option<usize>,
pub chunk_compression_type: CompressionType,
pub chunk_compression_level: Option<u32>,
pub thread_pool: Option<ThreadPoolNoAbort>,
pub thread_pool: ThreadPoolNoAbort,
pub max_positions_per_attributes: Option<u32>,
pub skip_index_budget: bool,
}
@@ -27,16 +28,39 @@ impl IndexerConfig {
}
}

/// By default use only 1 thread for indexing in tests
#[cfg(test)]
pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) {
let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
.num_threads(1)
.build()
.expect("failed to build default rayon thread pool");

(pool, Some(1))
}

#[cfg(not(test))]
pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) {
let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
.build()
.expect("failed to build default rayon thread pool");

(pool, None)
}

impl Default for IndexerConfig {
fn default() -> Self {
let (thread_pool, max_threads) = default_thread_pool_and_threads();

Self {
max_threads,
thread_pool,
log_every_n: None,
max_nb_chunks: None,
documents_chunk_size: None,
max_memory: None,
chunk_compression_type: CompressionType::None,
chunk_compression_level: None,
thread_pool: None,
max_positions_per_attributes: None,
skip_index_budget: false,
}
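Making thread_pool non-optional is what lets the call sites above delete their local_pool fallbacks: a default IndexerConfig now always carries a ready pool (single-threaded under cfg(test), so tests stay deterministic). A sketch of the simplified consumer, assuming ThreadPoolNoAbort exposes the rayon-style install used elsewhere in the crate:

let indexer_config = IndexerConfig::default();
let pool = &indexer_config.thread_pool; // always present, no match/fallback
pool.install(|| {
    // run the indexing pipeline on the pool's threads
})?;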
@@ -4,7 +4,7 @@ pub use self::concurrent_available_ids::ConcurrentAvailableIds;
pub use self::facet::bulk::FacetsUpdateBulk;
pub use self::facet::incremental::FacetsUpdateIncrementalInner;
pub use self::index_documents::*;
pub use self::indexer_config::IndexerConfig;
pub use self::indexer_config::{default_thread_pool_and_threads, IndexerConfig};
pub use self::new::ChannelCongestion;
pub use self::settings::{validate_embedding_settings, Setting, Settings};
pub use self::update_step::UpdateIndexingStep;

@@ -8,7 +8,7 @@ use hashbrown::HashMap;
use serde_json::Value;

use super::super::cache::BalancedCaches;
use super::facet_document::extract_document_facets;
use super::facet_document::{extract_document_facets, extract_geo_document};
use super::FacetKind;
use crate::fields_ids_map::metadata::Metadata;
use crate::filterable_attributes_rules::match_faceted_field;
@@ -90,17 +90,12 @@ impl FacetedDocidsExtractor {
let mut cached_sorter = context.data.borrow_mut_or_yield();
let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc);
let docid = document_change.docid();
let res = match document_change {
DocumentChange::Deletion(inner) => extract_document_facets(
inner.current(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
is_geo_enabled,
&mut |fid, meta, depth, value| {

// Using a macro avoids borrowing the parameters as mutable in both closures
// at the same time, by postponing the closures' creation.
macro_rules! facet_fn {
|
||||
(del) => {
|
||||
|fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| {
|
||||
Self::facet_fn_with_options(
|
||||
&context.doc_alloc,
|
||||
cached_sorter.deref_mut(),
|
||||
@ -114,91 +109,10 @@ impl FacetedDocidsExtractor {
|
||||
depth,
|
||||
value,
|
||||
)
|
||||
},
|
||||
),
|
||||
DocumentChange::Update(inner) => {
|
||||
let has_changed = inner.has_changed_for_fields(
|
||||
&mut |field_name| {
|
||||
match_faceted_field(
|
||||
field_name,
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
)
|
||||
},
|
||||
rtxn,
|
||||
index,
|
||||
context.db_fields_ids_map,
|
||||
)?;
|
||||
let has_changed_for_geo_fields =
|
||||
inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?;
|
||||
if !has_changed && !has_changed_for_geo_fields {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
extract_document_facets(
|
||||
inner.current(rtxn, index, context.db_fields_ids_map)?,
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
is_geo_enabled,
|
||||
&mut |fid, meta, depth, value| {
|
||||
Self::facet_fn_with_options(
|
||||
&context.doc_alloc,
|
||||
cached_sorter.deref_mut(),
|
||||
BalancedCaches::insert_del_u32,
|
||||
&mut del_add_facet_value,
|
||||
DelAddFacetValue::insert_del,
|
||||
docid,
|
||||
fid,
|
||||
meta,
|
||||
filterable_attributes,
|
||||
depth,
|
||||
value,
|
||||
)
|
||||
},
|
||||
)?;
|
||||
|
||||
extract_document_facets(
|
||||
inner.merged(rtxn, index, context.db_fields_ids_map)?,
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
is_geo_enabled,
|
||||
&mut |fid, meta, depth, value| {
|
||||
Self::facet_fn_with_options(
|
||||
&context.doc_alloc,
|
||||
cached_sorter.deref_mut(),
|
||||
BalancedCaches::insert_add_u32,
|
||||
&mut del_add_facet_value,
|
||||
DelAddFacetValue::insert_add,
|
||||
docid,
|
||||
fid,
|
||||
meta,
|
||||
filterable_attributes,
|
||||
depth,
|
||||
value,
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
DocumentChange::Insertion(inner) => extract_document_facets(
|
||||
inner.inserted(),
|
||||
inner.external_document_id(),
|
||||
new_fields_ids_map.deref_mut(),
|
||||
filterable_attributes,
|
||||
sortable_fields,
|
||||
asc_desc_fields,
|
||||
distinct_field,
|
||||
is_geo_enabled,
|
||||
&mut |fid, meta, depth, value| {
|
||||
};
|
||||
(add) => {
|
||||
|fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| {
|
||||
Self::facet_fn_with_options(
|
||||
&context.doc_alloc,
|
||||
cached_sorter.deref_mut(),
|
||||
@ -212,12 +126,116 @@ impl FacetedDocidsExtractor {
|
||||
depth,
|
||||
value,
|
||||
)
|
||||
},
|
||||
),
|
||||
}
|
||||
};
|
||||
}
|
||||

        match document_change {
            DocumentChange::Deletion(inner) => {
                let mut del = facet_fn!(del);

                extract_document_facets(
                    inner.current(rtxn, index, context.db_fields_ids_map)?,
                    new_fields_ids_map.deref_mut(),
                    filterable_attributes,
                    sortable_fields,
                    asc_desc_fields,
                    distinct_field,
                    &mut del,
                )?;

                if is_geo_enabled {
                    extract_geo_document(
                        inner.current(rtxn, index, context.db_fields_ids_map)?,
                        inner.external_document_id(),
                        new_fields_ids_map.deref_mut(),
                        &mut del,
                    )?;
                }
            }
            DocumentChange::Update(inner) => {
                let has_changed_for_facets = inner.has_changed_for_fields(
                    &mut |field_name| {
                        match_faceted_field(
                            field_name,
                            filterable_attributes,
                            sortable_fields,
                            asc_desc_fields,
                            distinct_field,
                        )
                    },
                    rtxn,
                    index,
                    context.db_fields_ids_map,
                )?;

                // 1. Maybe update doc
                if has_changed_for_facets {
                    extract_document_facets(
                        inner.current(rtxn, index, context.db_fields_ids_map)?,
                        new_fields_ids_map.deref_mut(),
                        filterable_attributes,
                        sortable_fields,
                        asc_desc_fields,
                        distinct_field,
                        &mut facet_fn!(del),
                    )?;

                    extract_document_facets(
                        inner.merged(rtxn, index, context.db_fields_ids_map)?,
                        new_fields_ids_map.deref_mut(),
                        filterable_attributes,
                        sortable_fields,
                        asc_desc_fields,
                        distinct_field,
                        &mut facet_fn!(add),
                    )?;
                }

                // 2. Maybe update geo
                if is_geo_enabled
                    && inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?
                {
                    extract_geo_document(
                        inner.current(rtxn, index, context.db_fields_ids_map)?,
                        inner.external_document_id(),
                        new_fields_ids_map.deref_mut(),
                        &mut facet_fn!(del),
                    )?;
                    extract_geo_document(
                        inner.merged(rtxn, index, context.db_fields_ids_map)?,
                        inner.external_document_id(),
                        new_fields_ids_map.deref_mut(),
                        &mut facet_fn!(add),
                    )?;
                }
            }
            DocumentChange::Insertion(inner) => {
                let mut add = facet_fn!(add);

                extract_document_facets(
                    inner.inserted(),
                    new_fields_ids_map.deref_mut(),
                    filterable_attributes,
                    sortable_fields,
                    asc_desc_fields,
                    distinct_field,
                    &mut add,
                )?;

                if is_geo_enabled {
                    extract_geo_document(
                        inner.inserted(),
                        inner.external_document_id(),
                        new_fields_ids_map.deref_mut(),
                        &mut add,
                    )?;
                }
            }
        };

        del_add_facet_value.send_data(docid, sender, &context.doc_alloc).unwrap();
        res
        Ok(())
    }

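In the Update arm above, facet values of the current version go through facet_fn!(del) and values of the merged version through facet_fn!(add); matching del/add pairs cancel when the caches are merged, so the net effect is roughly the set difference sketched below (plain std types and hypothetical facet values, not the milli caches):

use std::collections::HashSet;

fn main() {
    // Hypothetical facet values before and after an update.
    let current: HashSet<&str> = HashSet::from(["red", "blue"]);
    let merged: HashSet<&str> = HashSet::from(["blue", "green"]);

    for v in current.difference(&merged) {
        println!("del {v}"); // value disappears with the update
    }
    for v in merged.difference(&current) {
        println!("add {v}"); // value introduced by the update
    }
    // "blue" is in both sets, so its del and add cancel out.
}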
    #[allow(clippy::too_many_arguments)]

@ -16,13 +16,11 @@ use crate::filterable_attributes_rules::match_faceted_field;

#[allow(clippy::too_many_arguments)]
pub fn extract_document_facets<'doc>(
    document: impl Document<'doc>,
    external_document_id: &str,
    field_id_map: &mut GlobalFieldsIdsMap,
    filterable_attributes: &[FilterableAttributesRule],
    sortable_fields: &HashSet<String>,
    asc_desc_fields: &HashSet<String>,
    distinct_field: &Option<String>,
    is_geo_enabled: bool,
    facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>,
) -> Result<()> {
    // return the match result for the given field name.
@ -102,17 +100,24 @@ pub fn extract_document_facets<'doc>(
        }
    }

    if is_geo_enabled {
        if let Some(geo_value) = document.geo_field()? {
            if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
                let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map
                    .id_with_metadata_or_insert("_geo.lat")
                    .zip(field_id_map.id_with_metadata_or_insert("_geo.lng"))
                    .ok_or(UserError::AttributeLimitReached)?;
    Ok(())
}

                facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?;
                facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?;
            }
pub fn extract_geo_document<'doc>(
    document: impl Document<'doc>,
    external_document_id: &str,
    field_id_map: &mut GlobalFieldsIdsMap,
    facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>,
) -> Result<()> {
    if let Some(geo_value) = document.geo_field()? {
        if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
            let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map
                .id_with_metadata_or_insert("_geo.lat")
                .zip(field_id_map.id_with_metadata_or_insert("_geo.lng"))
                .ok_or(UserError::AttributeLimitReached)?;

            facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?;
            facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?;
        }
    }
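extract_geo_document, split out of extract_document_facets by this hunk, reads the reserved _geo field and forwards the coordinates as the synthetic _geo.lat and _geo.lng facet fields. A hedged illustration of the expected document shape, using plain serde_json rather than the milli Document trait:

use serde_json::json;

fn main() {
    // A document carrying the reserved `_geo` object; its `lat` and `lng`
    // members end up indexed under `_geo.lat` and `_geo.lng`.
    let doc = json!({ "id": 1, "_geo": { "lat": 48.8566, "lng": 2.3522 } });
    let geo = &doc["_geo"];
    let (lat, lng) = (geo["lat"].as_f64().unwrap(), geo["lng"].as_f64().unwrap());
    println!("_geo.lat = {lat}, _geo.lng = {lng}");
}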
@ -1,5 +1,6 @@
use std::sync::atomic::Ordering;

use blake2::{Blake2b512, Digest};
use bumpalo::collections::CollectIn;
use bumpalo::Bump;
use bumparaw_collections::RawMap;
@ -71,6 +72,7 @@ impl<'pl> DocumentOperation<'pl> {
        new_fields_ids_map: &mut FieldsIdsMap,
        must_stop_processing: &MSP,
        progress: Progress,
        shards: &[&str],
    ) -> Result<(DocumentOperationChanges<'pl>, Vec<PayloadStats>, Option<PrimaryKey<'pl>>)>
    where
        MSP: Fn() -> bool,
@ -108,6 +110,7 @@ impl<'pl> DocumentOperation<'pl> {
                    &docids_version_offsets,
                    IndexDocumentsMethod::ReplaceDocuments,
                    payload,
                    shards,
                ),
                Payload::Update(payload) => extract_addition_payload_changes(
                    indexer,
@ -121,6 +124,7 @@ impl<'pl> DocumentOperation<'pl> {
                    &docids_version_offsets,
                    IndexDocumentsMethod::UpdateDocuments,
                    payload,
                    shards,
                ),
                Payload::Deletion(to_delete) => extract_deletion_payload_changes(
                    index,
@ -128,6 +132,7 @@ impl<'pl> DocumentOperation<'pl> {
                    &mut available_docids,
                    &docids_version_offsets,
                    to_delete,
                    shards,
                ),
            };

@ -174,6 +179,7 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(
    main_docids_version_offsets: &hashbrown::HashMap<&'pl str, PayloadOperations<'pl>>,
    method: IndexDocumentsMethod,
    payload: &'pl [u8],
    shards: &[&str],
) -> Result<hashbrown::HashMap<&'pl str, PayloadOperations<'pl>>> {
    use IndexDocumentsMethod::{ReplaceDocuments, UpdateDocuments};

@ -184,7 +190,7 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(
    while let Some(doc) = iter.next().transpose().map_err(InternalError::SerdeJson)? {
        *bytes = previous_offset as u64;

        // Only guess the primary key if it is the first document
        // Only guess the primary key if it is the first document, regardless of the shard
        let retrieved_primary_key = if previous_offset == 0 {
            let doc = RawMap::from_raw_value_and_hasher(doc, FxBuildHasher, indexer)
                .map(Some)
@ -213,6 +219,11 @@ fn extract_addition_payload_changes<'r, 'pl: 'r>(
        let external_id =
            retrieved_primary_key.extract_fields_and_docid(doc, new_fields_ids_map, indexer)?;

        if must_be_skipped(external_id.to_de(), shards) {
            previous_offset = iter.byte_offset();
            continue;
        }

        let external_id = external_id.to_de();
        let current_offset = iter.byte_offset();
        let document_offset = DocumentOffset { content: &payload[previous_offset..current_offset] };
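The DocumentOffset above captures each raw document as the byte range between two offsets of the streaming deserializer. A minimal standalone sketch of that slicing with serde_json's StreamDeserializer (not the milli payload types):

use serde_json::Deserializer;

fn main() {
    // Two concatenated JSON documents, as in a multi-document payload.
    let payload: &[u8] = br#"{"id":1}{"id":2}"#;
    let mut iter = Deserializer::from_slice(payload).into_iter::<serde_json::Value>();
    let mut previous_offset = 0;
    while let Some(doc) = iter.next() {
        let _doc = doc.unwrap();
        let current_offset = iter.byte_offset();
        // The raw bytes of the document that was just parsed.
        let raw = &payload[previous_offset..current_offset];
        println!("{}", std::str::from_utf8(raw).unwrap());
        previous_offset = current_offset;
    }
}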
@ -330,10 +341,15 @@ fn extract_deletion_payload_changes<'s, 'pl: 's>(
    available_docids: &mut AvailableIds,
    main_docids_version_offsets: &hashbrown::HashMap<&'s str, PayloadOperations<'pl>>,
    to_delete: &'pl [&'pl str],
    shards: &[&str],
) -> Result<hashbrown::HashMap<&'s str, PayloadOperations<'pl>>> {
    let mut new_docids_version_offsets = hashbrown::HashMap::<&str, PayloadOperations<'pl>>::new();

    for external_id in to_delete {
        if must_be_skipped(external_id, shards) {
            continue;
        }

        match main_docids_version_offsets.get(external_id) {
            None => {
                match index.external_documents_ids().get(rtxn, external_id) {
@ -612,3 +628,25 @@ pub fn first_update_pointer(docops: &[InnerDocOp]) -> Option<usize> {
        InnerDocOp::Deletion => None,
    })
}

fn must_be_skipped(pk: &str, shards: &[&str]) -> bool {
    // Special case: with no shards configured, we must index the document ourselves
    if shards.is_empty() {
        return false;
    }

    // If there are multiple shards, the first shard is ourselves
    let mut hasher = Blake2b512::new();
    hasher.update(shards[0].as_bytes());
    hasher.update(pk.as_bytes());
    let me = hasher.finalize();

    shards.iter().skip(1).any(|shard| {
        let mut hasher = Blake2b512::new();
        hasher.update(shard.as_bytes());
        hasher.update(pk.as_bytes());
        let them = hasher.finalize();

        me < them
    })
}
@ -333,7 +333,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
        self.primary_key = Setting::Set(primary_key);
    }

    pub fn set_autorize_typos(&mut self, val: bool) {
    pub fn set_authorize_typos(&mut self, val: bool) {
        self.authorize_typos = Setting::Set(val);
    }

@ -792,7 +792,7 @@ fn test_disable_typo() {

    index
        .update_settings_using_wtxn(&mut txn, |settings| {
            settings.set_autorize_typos(false);
            settings.set_authorize_typos(false);
        })
        .unwrap();

@ -59,6 +59,7 @@ fn test_facet_distribution_with_no_facet_values() {
        &mut new_fields_ids_map,
        &|| false,
        Progress::default(),
        &[],
    )
    .unwrap();

@ -95,6 +95,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
        &mut new_fields_ids_map,
        &|| false,
        Progress::default(),
        &[],
    )
    .unwrap();

@ -329,6 +329,7 @@ fn criteria_ascdesc() {
        &mut new_fields_ids_map,
        &|| false,
        Progress::default(),
        &[],
    )
    .unwrap();

@ -138,6 +138,7 @@ fn test_typo_disabled_on_word() {
        &mut new_fields_ids_map,
        &|| false,
        Progress::default(),
        &[],
    )
    .unwrap();