Compare commits

..

1 Commit

SHA1: 4d4ab62e42
Message: Fail upgrade for indexes starting with fail-
- take a long time to upgrade for indexes starting with `long-`
Date: 2025-06-25 21:51:55 +02:00
68 changed files with 3048 additions and 3540 deletions

View File

@ -4,22 +4,22 @@ on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]
env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
jobs:
check-labels:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Check db change labels
id: check_labels
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
echo ${{ github.event.pull_request.number }}
echo $URL
LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
echo "Labels: $LABELS"
LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
echo "::error::Pull request must contain either the 'db change' or 'no db change' label."
exit 1

View File

@ -57,17 +57,9 @@ This command will be triggered to each PR as a requirement for merging it.
You can set the `LINDERA_CACHE` environment variable to speed up your successive builds by up to 2 minutes.
It'll store some built artifacts in the directory of your choice.
We recommend using the `$HOME/.cache/meili/lindera` directory:
We recommend using the standard `$HOME/.cache/lindera` directory:
```sh
export LINDERA_CACHE=$HOME/.cache/meili/lindera
```
You can set the `MILLI_BENCH_DATASETS_PATH` environment variable to further speed up your builds.
It'll store some big files used for the benchmarks in the directory of your choice.
We recommend using the `$HOME/.cache/meili/benches` directory:
```sh
export MILLI_BENCH_DATASETS_PATH=$HOME/.cache/meili/benches
export LINDERA_CACHE=$HOME/.cache/lindera
```
Furthermore, you can improve incremental compilation by setting the `MEILI_NO_VERGEN` environment variable.
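The exact value `MEILI_NO_VERGEN` expects is not documented in this excerpt; assuming it is checked for mere presence like most opt-out flags, any non-empty value should work:
```sh
export MEILI_NO_VERGEN=1
```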

Cargo.lock generated
View File

@ -3722,8 +3722,6 @@ dependencies = [
"insta",
"md5",
"once_cell",
"regex-lite",
"uuid",
]
[[package]]

View File

@ -5,7 +5,7 @@ use meilisearch_types::milli::documents::PrimaryKey;
use meilisearch_types::milli::progress::Progress;
use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
use meilisearch_types::milli::update::DocumentAdditionResult;
use meilisearch_types::milli::{self, ChannelCongestion, Filter};
use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder};
use meilisearch_types::settings::apply_settings_to_builder;
use meilisearch_types::tasks::{Details, KindWithContent, Status, Task};
use meilisearch_types::Index;
@ -113,8 +113,18 @@ impl IndexScheduler {
}
}
let local_pool;
let indexer_config = self.index_mapper.indexer_config();
let pool = &indexer_config.thread_pool;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new()
.thread_name(|i| format!("indexing-thread-{i}"))
.build()
.unwrap();
&local_pool
}
};
progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges);
let (document_changes, operation_stats, primary_key) = indexer
@ -256,8 +266,18 @@ impl IndexScheduler {
let mut congestion = None;
if task.error.is_none() {
let local_pool;
let indexer_config = self.index_mapper.indexer_config();
let pool = &indexer_config.thread_pool;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new()
.thread_name(|i| format!("indexing-thread-{i}"))
.build()
.unwrap();
&local_pool
}
};
let candidates_count = candidates.len();
progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges);
@ -409,8 +429,18 @@ impl IndexScheduler {
let mut congestion = None;
if !tasks.iter().all(|res| res.error.is_some()) {
let local_pool;
let indexer_config = self.index_mapper.indexer_config();
let pool = &indexer_config.thread_pool;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new()
.thread_name(|i| format!("indexing-thread-{i}"))
.build()
.unwrap();
&local_pool
}
};
progress.update_progress(DocumentDeletionProgress::DeleteDocuments);
let mut indexer = indexer::DocumentDeletion::new();
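The three hunks above repeat the same Rust idiom: `local_pool` is declared without an initializer so that a reference created inside the `match` can outlive the `match` itself. A minimal self-contained sketch of that fallback pattern, with plain `rayon` standing in for Meilisearch's `ThreadPoolNoAbortBuilder`:

```rust
use rayon::ThreadPoolBuilder;

fn main() {
    // Stand-in for `indexer_config.thread_pool`; `None` forces the fallback.
    let shared_pool: Option<rayon::ThreadPool> = None;

    // Declared but not initialized: it is only assigned in the `None` arm,
    // yet it must live as long as the `pool` reference does.
    let local_pool;
    let pool = match &shared_pool {
        Some(pool) => pool,
        None => {
            local_pool = ThreadPoolBuilder::new()
                .thread_name(|i| format!("indexing-thread-{i}"))
                .build()
                .unwrap();
            &local_pool
        }
    };

    pool.install(|| println!("running on {} threads", pool.current_num_threads()));
}
```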

View File

@ -25,6 +25,13 @@ impl IndexScheduler {
i as u32,
indexes.len() as u32,
));
if uid.starts_with("long-") {
tracing::warn!("taking a long time to upgrade for test purposes");
std::thread::sleep(std::time::Duration::from_secs(1200));
}
if uid.starts_with("fail-") {
panic!("failing for test purposes");
}
let index = self.index(uid)?;
let mut index_wtxn = index.write_txn()?;
let regen_stats = milli::update::upgrade::upgrade(
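These two hooks exist purely so tests can exercise slow and failing upgrades by choosing index names: a `long-` prefix sleeps for 1200 seconds (20 minutes), a `fail-` prefix panics. A trivial sketch of the dispatch, assuming plain `&str` uids:

```rust
fn upgrade_behavior(uid: &str) -> &'static str {
    if uid.starts_with("long-") {
        "sleeps 20 minutes to simulate a slow upgrade"
    } else if uid.starts_with("fail-") {
        "panics to simulate a failed upgrade"
    } else {
        "upgrades normally"
    }
}

fn main() {
    assert_eq!(upgrade_behavior("fail-products"), "panics to simulate a failed upgrade");
    assert_eq!(upgrade_behavior("movies"), "upgrades normally");
}
```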

View File

@ -15,5 +15,3 @@ license.workspace = true
insta = { version = "=1.39.0", features = ["json", "redactions"] }
md5 = "0.7.0"
once_cell = "1.20"
regex-lite = "0.1.6"
uuid = { version = "1.17.0", features = ["v4"] }

View File

@ -4,16 +4,9 @@ use std::path::{Path, PathBuf};
use std::sync::Mutex;
pub use insta;
use insta::internals::{Content, ContentPath};
use once_cell::sync::Lazy;
use regex_lite::Regex;
static SNAPSHOT_NAMES: Lazy<Mutex<HashMap<PathBuf, usize>>> = Lazy::new(Mutex::default);
/// A regex to match UUIDs in messages, specifically looking for the UUID v4 format
static UUID_IN_MESSAGE_RE: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}")
.unwrap()
});
/// Return the md5 hash of the given string
pub fn hash_snapshot(snap: &str) -> String {
@ -33,39 +26,6 @@ pub fn default_snapshot_settings_for_test<'a>(
let filename = path.file_name().unwrap().to_str().unwrap();
settings.set_omit_expression(true);
fn uuid_in_message_redaction(content: Content, _content_path: ContentPath) -> Content {
match &content {
Content::String(s) => {
let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]");
Content::String(uuid_replaced.to_string())
}
_ => content,
}
}
fn uuid_in_json_key_redaction(content: Content, _content_path: ContentPath) -> Content {
match content {
Content::Map(map) => {
let new_map = map
.iter()
.map(|(key, value)| match key {
Content::String(s) => {
let uuid_replaced = UUID_IN_MESSAGE_RE.replace_all(s, "[uuid]");
(Content::String(uuid_replaced.to_string()), value.clone())
}
_ => (key.clone(), value.clone()),
})
.collect();
Content::Map(new_map)
}
_ => content,
}
}
settings.add_dynamic_redaction(".**.message", uuid_in_message_redaction);
settings.add_dynamic_redaction(".**.indexUid", uuid_in_message_redaction);
settings.add_dynamic_redaction(".**.facetsByIndex", uuid_in_json_key_redaction);
let test_name = test_name.strip_suffix("::{{closure}}").unwrap_or(test_name);
let test_name = test_name.rsplit("::").next().unwrap().to_owned();
@ -272,9 +232,6 @@ macro_rules! json_string {
#[cfg(test)]
mod tests {
use crate as meili_snap;
use crate::UUID_IN_MESSAGE_RE;
use uuid::Uuid;
#[test]
fn snap() {
snapshot_hash!(10, @"d3d9446802a44259755d38e6d163e820");
@ -322,14 +279,4 @@ mod tests {
// snapshot_hash!("", name: "", @"d41d8cd98f00b204e9800998ecf8427e");
}
}
#[test]
fn uuid_in_message_regex() {
let uuid1 = Uuid::new_v4();
let uuid2 = Uuid::new_v4();
let uuid3 = Uuid::new_v4();
let to_replace = format!("1 {uuid1} 2 {uuid2} 3 {uuid3} 4");
let replaced = UUID_IN_MESSAGE_RE.replace_all(to_replace.as_str(), "[uuid]");
assert_eq!(replaced, "1 [uuid] 2 [uuid] 3 [uuid] 4");
}
}

View File

@ -697,7 +697,7 @@ pub fn apply_settings_to_builder(
match typo_tolerance {
Setting::Set(ref value) => {
match value.enabled {
Setting::Set(val) => builder.set_authorize_typos(val),
Setting::Set(val) => builder.set_autorize_typos(val),
Setting::Reset => builder.reset_authorize_typos(),
Setting::NotSet => (),
}

View File

@ -120,7 +120,7 @@ actix-web-lab = { version = "0.24.1", default-features = false }
actix-rt = "2.10.0"
brotli = "6.0.0"
# fixed version due to format breakages in v1.40
insta = { version = "=1.39.0", features = ["redactions"] }
insta = "=1.39.0"
manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }

View File

@ -37,9 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
use meilisearch_auth::{open_auth_store_env, AuthController};
use meilisearch_types::milli::constants::VERSION_MAJOR;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::update::{
default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
};
use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
use meilisearch_types::settings::apply_settings_to_builder;
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::versioning::{
@ -503,19 +501,7 @@ fn import_dump(
let network = dump_reader.network()?.cloned().unwrap_or_default();
index_scheduler.put_network(network)?;
// 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
let backup_config;
let base_config = index_scheduler.indexer_config();
let indexer_config = if base_config.max_threads.is_none() {
let (thread_pool, _) = default_thread_pool_and_threads();
let _config = IndexerConfig { thread_pool, ..*base_config };
backup_config = _config;
&backup_config
} else {
base_config
};
let indexer_config = index_scheduler.indexer_config();
// /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
// try to process tasks while we're trying to import the indexes.

View File

@ -761,12 +761,10 @@ impl IndexerOpts {
max_indexing_memory.to_string(),
);
}
if let Some(max_indexing_threads) = max_indexing_threads.0 {
export_to_env_if_not_present(
MEILI_MAX_INDEXING_THREADS,
max_indexing_threads.to_string(),
);
}
export_to_env_if_not_present(
MEILI_MAX_INDEXING_THREADS,
max_indexing_threads.0.to_string(),
);
}
}
@ -774,15 +772,15 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
type Error = anyhow::Error;
fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing()
.num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2))
let thread_pool = ThreadPoolNoAbortBuilder::new()
.thread_name(|index| format!("indexing-thread:{index}"))
.num_threads(*other.max_indexing_threads)
.build()?;
Ok(Self {
thread_pool,
log_every_n: Some(DEFAULT_LOG_EVERY_N),
max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
max_threads: *other.max_indexing_threads,
thread_pool: Some(thread_pool),
max_positions_per_attributes: None,
skip_index_budget: other.skip_index_budget,
..Default::default()
@ -845,31 +843,31 @@ fn total_memory_bytes() -> Option<u64> {
}
}
#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxThreads(Option<usize>);
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxThreads(usize);
impl FromStr for MaxThreads {
type Err = ParseIntError;
fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
if s.is_empty() || s == "unlimited" {
return Ok(MaxThreads::default());
}
usize::from_str(s).map(Some).map(MaxThreads)
fn from_str(s: &str) -> Result<Self, Self::Err> {
usize::from_str(s).map(Self)
}
}
impl Default for MaxThreads {
fn default() -> Self {
MaxThreads(num_cpus::get() / 2)
}
}
impl fmt::Display for MaxThreads {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {
Some(threads) => write!(f, "{}", threads),
None => write!(f, "unlimited"),
}
write!(f, "{}", self.0)
}
}
impl Deref for MaxThreads {
type Target = Option<usize>;
type Target = usize;
fn deref(&self) -> &Self::Target {
&self.0
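For reference, the `Option<usize>` variant of `MaxThreads` shown above treats an empty string or `unlimited` as "no explicit cap". A self-contained sketch of that parsing behavior:

```rust
use std::num::ParseIntError;
use std::str::FromStr;

#[derive(Default, Debug)]
struct MaxThreads(Option<usize>);

impl FromStr for MaxThreads {
    type Err = ParseIntError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Empty or "unlimited" means no cap; anything else must be an integer.
        if s.is_empty() || s == "unlimited" {
            return Ok(MaxThreads::default());
        }
        usize::from_str(s).map(Some).map(MaxThreads)
    }
}

fn main() -> Result<(), ParseIntError> {
    assert!(matches!("unlimited".parse::<MaxThreads>()?, MaxThreads(None)));
    assert!(matches!("8".parse::<MaxThreads>()?, MaxThreads(Some(8))));
    Ok(())
}
```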

View File

@ -1,242 +0,0 @@
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use deserr::actix_web::{AwebJson, AwebQueryParameter};
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;
use utoipa::{IntoParams, OpenApi};
use super::ActionPolicy;
use crate::analytics::Analytics;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::indexes::similar_analytics::{SimilarAggregator, SimilarGET, SimilarPOST};
use crate::search::{
add_search_rules, perform_similar, RankingScoreThresholdSimilar, RetrieveVectors, Route,
SearchKind, SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
};
#[derive(OpenApi)]
#[openapi(
paths(similar_get, similar_post),
tags(
(
name = "Duplicate an index",
description = "The /duplicate route clones an index",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/duplicate"),
),
),
)]
pub struct DuplicateApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(duplicate))));
}
/// Duplicate an index
#[utoipa::path(
post,
path = "{indexUid}/duplicate",
tag = "Duplicate an index",
security(("Bearer" = ["settings", "documents", "*"])),
params(("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = DuplicateQuery,
responses(
(status = 200, description = "The documents are returned", body = SimilarResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 2770,
"title": "American Pie 2",
"poster": "https://image.tmdb.org/t/p/w1280/q4LNgUnRfltxzp3gf1MAGiK5LhV.jpg",
"overview": "The whole gang are back and as close as ever. They decide to get even closer by spending the summer together at a beach house. They decide to hold the biggest…",
"release_date": 997405200
},
{
"id": 190859,
"title": "American Sniper",
"poster": "https://image.tmdb.org/t/p/w1280/svPHnYE7N5NAGO49dBmRhq0vDQ3.jpg",
"overview": "U.S. Navy SEAL Chris Kyle takes his sole mission—protect his comrades—to heart and becomes one of the most lethal snipers in American history. His pinpoint accuracy not only saves countless lives but also makes him a prime…",
"release_date": 1418256000
}
],
"offset": 0,
"limit": 2,
"estimatedTotalHits": 976,
"processingTimeMs": 35,
"query": "american "
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn similar_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebJson<DuplicateQuery, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let query = params.into_inner();
debug!(parameters = ?query, "Similar post");
let mut aggregate = SimilarAggregator::<SimilarPOST>::from_query(&query);
let similar = similar(index_scheduler, index_uid, query).await;
if let Ok(similar) = &similar {
aggregate.succeed(similar);
}
analytics.publish(aggregate, &req);
let similar = similar?;
debug!(returns = ?similar, "Similar post");
Ok(HttpResponse::Ok().json(similar))
}
async fn similar(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: IndexUid,
mut query: SimilarQuery,
) -> Result<SimilarResult, ResponseError> {
let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
add_search_rules(&mut query.filter, search_rules);
}
let index = index_scheduler.index(&index_uid)?;
let (embedder_name, embedder, quantized) = SearchKind::embedder(
&index_scheduler,
index_uid.to_string(),
&index,
&query.embedder,
None,
Route::Similar,
)?;
tokio::task::spawn_blocking(move || {
perform_similar(
&index,
query,
embedder_name,
embedder,
quantized,
retrieve_vectors,
index_scheduler.features(),
)
})
.await?
}
#[derive(Debug, deserr::Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(parameter_in = Query)]
pub struct SimilarQueryGet {
#[deserr(error = DeserrQueryParamError<InvalidSimilarId>)]
#[param(value_type = String)]
id: Param<String>,
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSimilarOffset>)]
#[param(value_type = usize, default = DEFAULT_SEARCH_OFFSET)]
offset: Param<usize>,
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSimilarLimit>)]
#[param(value_type = usize, default = DEFAULT_SEARCH_LIMIT)]
limit: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarAttributesToRetrieve>)]
#[param(value_type = Vec<String>)]
attributes_to_retrieve: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarRetrieveVectors>)]
#[param(value_type = bool, default)]
retrieve_vectors: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarFilter>)]
filter: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarShowRankingScore>)]
#[param(value_type = bool, default)]
show_ranking_score: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarShowRankingScoreDetails>)]
#[param(value_type = bool, default)]
show_ranking_score_details: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarRankingScoreThreshold>, default)]
#[param(value_type = Option<f32>)]
pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
#[deserr(error = DeserrQueryParamError<InvalidSimilarEmbedder>)]
pub embedder: String,
}
#[derive(Debug, Clone, Copy, PartialEq, deserr::Deserr)]
#[deserr(try_from(String) = TryFrom::try_from -> InvalidSimilarRankingScoreThreshold)]
pub struct RankingScoreThresholdGet(RankingScoreThresholdSimilar);
impl std::convert::TryFrom<String> for RankingScoreThresholdGet {
type Error = InvalidSimilarRankingScoreThreshold;
fn try_from(s: String) -> Result<Self, Self::Error> {
let f: f64 = s.parse().map_err(|_| InvalidSimilarRankingScoreThreshold)?;
Ok(RankingScoreThresholdGet(RankingScoreThresholdSimilar::try_from(f)?))
}
}
impl From<SimilarQueryGet> for SimilarQuery {
fn from(
SimilarQueryGet {
id,
offset,
limit,
attributes_to_retrieve,
retrieve_vectors,
filter,
show_ranking_score,
show_ranking_score_details,
embedder,
ranking_score_threshold,
}: SimilarQueryGet,
) -> Self {
let filter = match filter {
Some(f) => match serde_json::from_str(&f) {
Ok(v) => Some(v),
_ => Some(Value::String(f)),
},
None => None,
};
SimilarQuery {
id: serde_json::Value::String(id.0),
offset: offset.0,
limit: limit.0,
filter,
embedder,
attributes_to_retrieve: attributes_to_retrieve.map(|o| o.into_iter().collect()),
retrieve_vectors: retrieve_vectors.0,
show_ranking_score: show_ranking_score.0,
show_ranking_score_details: show_ranking_score_details.0,
ranking_score_threshold: ranking_score_threshold.map(|x| x.0),
}
}
}
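One detail worth calling out in `From<SimilarQueryGet> for SimilarQuery` above: the `filter` query parameter is first parsed as JSON and only falls back to a raw string when parsing fails, so both `["color = blue"]` and `color = blue` are accepted. A minimal sketch of that coercion:

```rust
use serde_json::Value;

// Try to parse the query-string filter as JSON; fall back to a plain string.
fn coerce_filter(f: Option<String>) -> Option<Value> {
    f.map(|f| serde_json::from_str(&f).unwrap_or(Value::String(f)))
}

fn main() {
    assert_eq!(
        coerce_filter(Some(r#"["color = blue"]"#.into())),
        Some(serde_json::json!(["color = blue"]))
    );
    assert_eq!(
        coerce_filter(Some("color = blue".into())),
        Some(Value::String("color = blue".into()))
    );
}
```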

View File

@ -29,7 +29,6 @@ use crate::routes::is_dry_run;
use crate::Opt;
pub mod documents;
pub mod duplicate;
pub mod facet_search;
pub mod search;
mod search_analytics;
@ -78,8 +77,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/facet-search").configure(facet_search::configure))
.service(web::scope("/similar").configure(similar::configure))
.service(web::scope("/settings").configure(settings::configure))
.service(web::scope("/duplicate").configure(duplicate::configure)),
.service(web::scope("/settings").configure(settings::configure)),
);
}

View File

@ -1,5 +1,5 @@
//! This file implements a queue of searches to process and the ability to control how many searches can be run in parallel.
//! We need this because we don't want to process more search requests than the available CPU cores.
//! We need this because we don't want to process more search requests than we have cores.
//! That slows down everything and consumes RAM for no reason.
//! The steps to do a search are to get the `SearchQueue` data structure and try to get a search permit.
//! This can fail if the queue is full, and we need to drop your search request to register a new one.
@ -8,7 +8,7 @@
//!
//! In order to do a search request you should try to get a search permit.
//! Retrieve the `SearchQueue` structure from actix-web (`search_queue: Data<SearchQueue>`)
//! and right before processing the search, call the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//! and right before processing the search, calls the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//!
//! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
//! Then, the queue/scheduler is going to either:
@ -121,12 +121,12 @@ impl SearchQueue {
let mut queue: Vec<oneshot::Sender<Permit>> = Default::default();
let mut rng: StdRng = StdRng::from_entropy();
let mut searches_running: usize = 0;
// By having a capacity of parallelism we ensure that every time a search finish it can release its RAM asap
// By having a capacity of parallelism we ensures that every time a search finish it can release its RAM asap
let (sender, mut search_finished) = mpsc::channel(parallelism.into());
loop {
tokio::select! {
// biased select because we want to free up space before trying to register new tasks
// biased select because we wants to free up space before trying to register new tasks
biased;
_ = search_finished.recv() => {
searches_running = searches_running.saturating_sub(1);
@ -148,11 +148,11 @@ impl SearchQueue {
if searches_running < usize::from(parallelism) && queue.is_empty() {
searches_running += 1;
// if the search requests die, it's not a hard error on our side
// if the search requests die it's not a hard error on our side
let _ = search_request.send(Permit { sender: sender.clone() });
continue;
} else if capacity == 0 {
// in the very specific case where we have a capacity of zero,
// in the very specific case where we have a capacity of zero
// we must refuse the request straight away without going through
// the queue stuff.
drop(search_request);
@ -183,7 +183,7 @@ impl SearchQueue {
.map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?;
// If we've been for more than one minute to get a search permit, it's better to simply
// abort the search request than spending time processing something where the client
// abort the search request than spending time processing something were the client
// most certainly exited or got a timeout a long time ago.
// We may find a better solution in https://github.com/actix/actix-web/issues/3462.
if now.elapsed() > self.time_to_abort {
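Read together, the module docs and the loop above describe a small protocol: callers send a `oneshot::Sender` to the scheduler, the scheduler answers with a `Permit` wrapping an `mpsc` sender, and dropping the `Permit` frees the slot. A self-contained toy version of that mechanism (simplified: a full queue refuses outright instead of evicting a random waiter):

```rust
use tokio::sync::{mpsc, oneshot};

struct Permit {
    sender: mpsc::Sender<()>,
}

impl Drop for Permit {
    fn drop(&mut self) {
        // Best-effort notification that a search finished.
        let _ = self.sender.try_send(());
    }
}

async fn scheduler(mut register: mpsc::Receiver<oneshot::Sender<Permit>>, parallelism: usize) {
    let mut searches_running = 0usize;
    // Capacity `parallelism` guarantees finished searches can always notify.
    let (sender, mut search_finished) = mpsc::channel::<()>(parallelism);
    loop {
        tokio::select! {
            // biased: free up slots before registering new searches
            biased;
            Some(()) = search_finished.recv() => {
                searches_running = searches_running.saturating_sub(1);
            }
            maybe_request = register.recv() => {
                let Some(request) = maybe_request else { break };
                if searches_running < parallelism {
                    searches_running += 1;
                    // If the requester already died, that's not our problem.
                    let _ = request.send(Permit { sender: sender.clone() });
                } else {
                    drop(request); // refuse: the caller's oneshot errors out
                }
            }
        }
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::channel(8);
    tokio::spawn(scheduler(rx, 2));

    let (ask, get) = oneshot::channel();
    tx.send(ask).await.unwrap();
    let permit = get.await.expect("refused");
    // ... run the search while holding `permit` ...
    drop(permit); // releases the slot
}
```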

View File

@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() {
let (response, code) = server.add_api_key(content).await;
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "`uid` field value `[uuid]` is already an existing API key.",
"message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.",
"code": "api_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#api_key_already_exists"

View File

@ -29,10 +29,6 @@ impl<'a> Index<'a, Owned> {
}
}
pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> {
Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData }
}
pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
@ -294,20 +290,6 @@ impl Index<'_, Shared> {
}
(task, code)
}
pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
let (mut task, code) = self._update(primary_key).await;
if code.is_success() {
task = self.wait_task(task.uid()).await;
if task.is_success() {
panic!(
"`update_index_fail` succeeded: {}",
serde_json::to_string_pretty(&task).unwrap()
);
}
}
(task, code)
}
}
#[allow(dead_code)]
@ -351,14 +333,6 @@ impl<State> Index<'_, State> {
self.service.post_encoded("/indexes", body, self.encoder).await
}
pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
let body = json!({
"primaryKey": primary_key,
});
let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, body, self.encoder).await
}
pub(super) async fn _delete(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
self.service.delete(url).await

View File

@ -128,8 +128,7 @@ impl Display for Value {
".finishedAt" => "[date]",
".duration" => "[duration]",
".processingTimeMs" => "[duration]",
".details.embedders.*.url" => "[url]",
".details.dumpUid" => "[dump_uid]",
".details.embedders.*.url" => "[url]"
})
)
}
@ -265,24 +264,6 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
])
});
pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SHARED_SCORE_DOCUMENTS").to_shared();
let documents = SCORE_DOCUMENTS.clone();
let (response, _code) = index._add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
index
}).await
}
pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@ -352,7 +333,7 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Sha
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}),
json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
@ -454,57 +435,3 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
})
.await
}
pub static GEO_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"id": 1,
"name": "Taco Truck",
"address": "444 Salsa Street, Burritoville",
"type": "Mexican",
"rating": 9,
"_geo": {
"lat": 34.0522,
"lng": -118.2437
}
},
{
"id": 2,
"name": "La Bella Italia",
"address": "456 Elm Street, Townsville",
"type": "Italian",
"rating": 9,
"_geo": {
"lat": "45.4777599",
"lng": "9.1967508"
}
},
{
"id": 3,
"name": "Crêpe Truck",
"address": "2 Billig Avenue, Rouenville",
"type": "French",
"rating": 10
}
])
});
pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX
.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SHARED_GEO_DOCUMENTS").to_shared();
let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
index
})
.await
}

View File

@ -347,16 +347,6 @@ impl<State> Server<State> {
}
}
pub fn unique_index_with_prefix(&self, prefix: &str) -> Index<'_> {
let uuid = Uuid::new_v4();
Index {
uid: format!("{prefix}-{}", uuid),
service: &self.service,
encoder: Encoder::Plain,
marker: PhantomData,
}
}
pub fn unique_index_with_encoder(&self, encoder: Encoder) -> Index<'_> {
let uuid = Uuid::new_v4();
Index { uid: uuid.to_string(), service: &self.service, encoder, marker: PhantomData }
@ -409,9 +399,18 @@ impl<State> Server<State> {
pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
let max_attempts = 400; // 200 seconds total, 0.5s per attempt
// Increase timeout for vector-related tests
let max_attempts = if url.contains("/tasks/") {
if update_id > 1000 {
400 // 200 seconds for vector tests
} else {
100 // 50 seconds for other tests
}
} else {
100 // 50 seconds for other tests
};
for i in 0..max_attempts {
for _ in 0..max_attempts {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);
@ -421,10 +420,6 @@ impl<State> Server<State> {
// wait 0.5 second.
sleep(Duration::from_millis(500)).await;
if i == max_attempts - 1 {
dbg!(response);
}
}
panic!("Timeout waiting for update id");
}
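The loop above is the usual poll-with-a-cap pattern: re-fetch the task every 500 ms and panic once the attempt budget is spent. A generic, self-contained sketch, with a stub closure standing in for the `GET /tasks/{uid}` call:

```rust
use std::time::Duration;

// Poll `fetch_status` every 500 ms until the task leaves its pending states,
// or give up after `max_attempts`.
async fn wait_until_done(
    mut fetch_status: impl FnMut() -> String,
    max_attempts: usize,
) -> Result<String, &'static str> {
    for _ in 0..max_attempts {
        let status = fetch_status();
        if status != "enqueued" && status != "processing" {
            return Ok(status);
        }
        tokio::time::sleep(Duration::from_millis(500)).await;
    }
    Err("Timeout waiting for update id")
}

#[tokio::main]
async fn main() {
    let mut polls = 0;
    let status = wait_until_done(
        move || {
            polls += 1;
            if polls < 3 { "processing".into() } else { "succeeded".into() }
        },
        400, // 200 seconds total at 0.5 s per attempt
    )
    .await
    .unwrap();
    assert_eq!(status, "succeeded");
}
```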

File diff suppressed because it is too large

View File

@ -1,35 +1,39 @@
use meili_snap::{json_string, snapshot};
use crate::common::{shared_does_not_exists_index, GetAllDocumentsOptions, Server};
use crate::common::{GetAllDocumentsOptions, Server};
use crate::json;
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.delete_document(0).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
}
#[actix_rt::test]
async fn delete_one_unexisting_document() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!(code, 202, "{}", response);
let update = index.wait_task(response.uid()).await;
assert_eq!(update["status"], "succeeded");
}
#[actix_rt::test]
async fn delete_one_document() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, status_code) = index.delete_document(0).await;
let (task, status_code) = server.index("test").delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
@ -39,18 +43,20 @@ async fn delete_one_document() {
#[actix_rt::test]
async fn clear_all_documents_unexisting_index() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
}
#[actix_rt::test]
async fn clear_all_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index
.add_documents(
json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]),
@ -61,7 +67,7 @@ async fn clear_all_documents() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -69,14 +75,14 @@ async fn clear_all_documents() {
#[actix_rt::test]
async fn clear_all_documents_empty_index() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -84,31 +90,33 @@ async fn clear_all_documents_empty_index() {
#[actix_rt::test]
async fn error_delete_batch_unexisting_index() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.delete_batch(vec![]).await;
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}
#[actix_rt::test]
async fn delete_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@ -117,14 +125,14 @@ async fn delete_batch() {
#[actix_rt::test]
async fn delete_no_document_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");
let (_response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{}", _response);
let _update = index.wait_task(response.uid()).await.succeeded();
let _update = index.wait_task(_response.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@ -132,8 +140,8 @@ async fn delete_no_document_batch() {
#[actix_rt::test]
async fn delete_document_by_filter() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
index.update_settings_filterable_attributes(json!(["color"])).await;
let (task, _status_code) = index
.add_documents(
@ -170,22 +178,22 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 2,
"indexUid": "doggo",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -243,22 +251,22 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter(json!({ "filter": "color NOT EXISTS"})).await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 3,
"indexUid": "doggo",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -313,8 +321,8 @@ async fn delete_document_by_filter() {
#[actix_rt::test]
async fn delete_document_by_complex_filter() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
index.update_settings_filterable_attributes(json!(["color"])).await;
let (task, _status_code) = index
.add_documents(
@ -335,22 +343,22 @@ async fn delete_document_by_complex_filter() {
)
.await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 2,
"indexUid": "doggo",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -394,22 +402,22 @@ async fn delete_document_by_complex_filter() {
.delete_document_by_filter(json!({ "filter": [["color = green", "color NOT EXISTS"]] }))
.await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 3,
"indexUid": "doggo",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,

View File

@ -621,7 +621,7 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter_fail(json!({ "filter": "catto = jorts"})).await;
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await;
snapshot!(response, @r###"
{
"uid": "[uid]",
@ -665,7 +665,7 @@ async fn fetch_document_by_filter() {
Some("id"),
)
.await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!(null)).await;
snapshot!(code, @"400 Bad Request");

View File

@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() {
#[actix_rt::test]
async fn get_document_with_vectors() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({

View File

@ -6,18 +6,19 @@ use crate::json;
#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
let server = Server::new_shared();
let index = server.unique_index_with_prefix("883 fj!");
let server = Server::new().await;
let index = server.index("883 fj!");
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "`883 fj!-[uuid]` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}"###);
let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});
assert_eq!(code, 400);
assert_eq!(response, expected_response);
}
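The error message spells out the uid rule: an integer, or a string of alphanumeric characters, hyphens and underscores, at most 512 bytes. A sketch of that validation (the non-empty requirement is an assumption, since the message does not state it):

```rust
fn is_valid_index_uid(uid: &str) -> bool {
    !uid.is_empty() // assumption: empty uids are rejected too
        && uid.len() <= 512
        && uid.bytes().all(|b| b.is_ascii_alphanumeric() || b == b'-' || b == b'_')
}

fn main() {
    assert!(!is_valid_index_uid("883 fj!")); // space and `!` are rejected
    assert!(is_valid_index_uid("movies_2024"));
}
```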
#[actix_rt::test]

View File

@ -46,10 +46,8 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let server = Server::new_shared();
let app = server.init_web_app().await;
let index = server.unique_index_with_prefix("test");
let body = serde_json::to_string(&json!({
"uid": index.uid.clone(),
"uid": "test",
"primaryKey": None::<&str>,
}))
.unwrap();
@ -70,7 +68,7 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let parsed_response =
serde_json::from_slice::<Value>(decoded.into().as_ref()).expect("Expecting valid json");
assert_eq!(parsed_response["indexUid"], index.uid);
assert_eq!(parsed_response["indexUid"], "test");
}
#[actix_rt::test]

View File

@ -28,7 +28,6 @@ async fn error_delete_unexisting_index() {
let (task, code) = index.delete_index_fail().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",
@ -58,7 +57,7 @@ async fn loop_delete_add_documents() {
}
for task in tasks {
let response = index.wait_task(task).await.succeeded();
let response = index.wait_task(task).await;
assert_eq!(response["status"], "succeeded", "{}", response);
}
}

View File

@ -52,28 +52,19 @@ async fn no_index_return_empty_list() {
#[actix_rt::test]
async fn list_multiple_indexes() {
let server = Server::new_shared();
let server = Server::new().await;
server.index("test").create(None).await;
let (task, _status_code) = server.index("test1").create(Some("key")).await;
let index_without_key = server.unique_index();
let (response_without_key, _status_code) = index_without_key.create(None).await;
server.index("test").wait_task(task.uid()).await.succeeded();
let index_with_key = server.unique_index();
let (response_with_key, _status_code) = index_with_key.create(Some("key")).await;
index_without_key.wait_task(response_without_key.uid()).await.succeeded();
index_with_key.wait_task(response_with_key.uid()).await.succeeded();
let (response, code) = server.list_indexes(None, Some(1000)).await;
let (response, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert!(arr.len() >= 2, "Expected at least 2 indexes.");
assert!(arr
.iter()
.any(|entry| entry["uid"] == index_without_key.uid && entry["primaryKey"] == Value::Null));
assert!(arr
.iter()
.any(|entry| entry["uid"] == index_with_key.uid && entry["primaryKey"] == "key"));
assert_eq!(arr.len(), 2);
assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}
#[actix_rt::test]

View File

@ -1,11 +1,10 @@
use crate::common::{shared_does_not_exists_index, Server};
use crate::common::Server;
use crate::json;
#[actix_rt::test]
async fn stats() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
@ -16,7 +15,7 @@ async fn stats() {
assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 0);
assert_eq!(response["isIndexing"], false);
assert!(response["isIndexing"] == false);
assert!(response["fieldDistribution"].as_object().unwrap().is_empty());
let documents = json!([
@ -32,6 +31,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 1);
index.wait_task(response.uid()).await.succeeded();
@ -39,7 +39,7 @@ async fn stats() {
assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 2);
assert_eq!(response["isIndexing"], false);
assert!(response["isIndexing"] == false);
assert_eq!(response["fieldDistribution"]["id"], 2);
assert_eq!(response["fieldDistribution"]["name"], 1);
assert_eq!(response["fieldDistribution"]["age"], 1);
@ -47,11 +47,11 @@ async fn stats() {
#[actix_rt::test]
async fn error_get_stats_unexisting_index() {
let index = shared_does_not_exists_index().await;
let (response, code) = index.stats().await;
let server = Server::new().await;
let (response, code) = server.index("test").stats().await;
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

View File

@ -2,26 +2,28 @@ use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::encoder::Encoder;
use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server};
use crate::common::Server;
use crate::json;
#[actix_rt::test]
async fn update_primary_key() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.create(None).await;
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (response, code) = index.get().await;
assert_eq!(code, 200);
assert_eq!(response["uid"], index.uid);
assert_eq!(response["uid"], "test");
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());
@ -37,23 +39,24 @@ async fn update_primary_key() {
#[actix_rt::test]
async fn create_and_update_with_different_encoding() {
let server = Server::new_shared();
let index = server.unique_index_with_encoder(Encoder::Gzip);
let (create_task, code) = index.create(None).await;
let server = Server::new().await;
let index = server.index_with_encoder("test", Encoder::Gzip);
let (_, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();
let index = index.with_encoder(Encoder::Brotli);
let index = server.index_with_encoder("test", Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
#[actix_rt::test]
async fn update_nothing() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task1, code) = index.create(None).await;
assert_eq!(code, 202);
@ -64,20 +67,35 @@ async fn update_nothing() {
assert_eq!(code, 202);
index.wait_task(task2.uid()).await.succeeded();
let response = index.wait_task(task2.uid()).await;
assert_eq!(response["status"], "succeeded");
}
#[actix_rt::test]
async fn error_update_existing_primary_key() {
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let response = index.wait_task(update_task.uid()).await.failed();
let documents = json!([
{
"id": "11",
"content": "foobar"
}
]);
index.add_documents(documents, None).await;
let (task, code) = index.update(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
let expected_response = json!({
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
"message": "Index `test`: Index already has a primary key: `id`.",
"code": "index_primary_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
@ -88,15 +106,15 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
let server = Server::new().await;
let (task, code) = server.index("test").update(None).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.index("test").wait_task(task.uid()).await;
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

View File

@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str = "product_id";
/// testing: https://github.com/meilisearch/meilisearch/issues/4078
#[actix_rt::test]
async fn distinct_search_with_offset_no_ranking() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(response["estimatedTotalHits"] , @"11");
let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(response["estimatedTotalHits"], @"10");
let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");
let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"1");
snapshot!(format!("{hits:?}"), @r#"["345678"]"#);
snapshot!(format!("{:?}", hits), @r#"["345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");
let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
}
/// testing: https://github.com/meilisearch/meilisearch/issues/4130
#[actix_rt::test]
async fn distinct_search_with_pagination_no_ranking() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["page"], @"0");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(response["page"], @"3");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(response["page"], @"4");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#);
snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"2");
snapshot!(response["totalHits"], @"6");
@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() {
#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("tamo");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await.succeeded();
let task = index.wait_task(task.uid()).await;
snapshot!(task, name: "succeed");
fn get_hits(response: &Value) -> Vec<String> {
@ -299,7 +299,7 @@ async fn distinct_at_search_time() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###);
snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"1");
snapshot!(response["totalHits"], @"3");



@ -707,7 +707,7 @@ async fn filter_invalid_attribute_array() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -728,7 +728,7 @@ async fn filter_invalid_attribute_string() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -885,7 +885,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -911,7 +911,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -932,7 +932,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -958,7 +958,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -984,7 +984,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -1143,7 +1143,7 @@ async fn search_on_unknown_field() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@ -1164,7 +1164,7 @@ async fn search_on_unknown_field_plus_joker() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@ -1182,7 +1182,7 @@ async fn search_on_unknown_field_plus_joker() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@ -1195,8 +1195,10 @@ async fn search_on_unknown_field_plus_joker() {
#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
@ -1206,7 +1208,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@ -1221,7 +1223,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@ -1236,7 +1238,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"



@ -50,11 +50,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -63,18 +65,21 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
server.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
eprintln!("Settings -> Documents -> test");
let index = server.index("test");
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -83,13 +88,14 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
server.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
}
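
One pattern in this helper deserves a restatement (the sketch mirrors the lines above, nothing new assumed): every write is an asynchronous task, so the helper blocks on `wait_task` and checks for success before issuing the facet search.

```rust
// Sketch of the enqueue-then-wait pattern used for every write.
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
```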
#[actix_rt::test]
async fn simple_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -99,20 +105,20 @@ async fn simple_facet_search() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn simple_facet_search_on_movies() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{
@ -206,23 +212,23 @@ async fn simple_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(response["facetHits"], @r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###);
}
#[actix_rt::test]
async fn advanced_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -245,8 +251,8 @@ async fn advanced_facet_search() {
#[actix_rt::test]
async fn more_advanced_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -269,8 +275,8 @@ async fn more_advanced_facet_search() {
#[actix_rt::test]
async fn simple_facet_search_with_max_values() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
@ -281,14 +287,14 @@ async fn simple_facet_search_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn simple_facet_search_by_count_with_max_values() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index
@ -303,14 +309,14 @@ async fn simple_facet_search_by_count_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn non_filterable_facet_search_error() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@ -318,17 +324,17 @@ async fn non_filterable_facet_search_error() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await;
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
}
#[actix_rt::test]
async fn facet_search_dont_support_words() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -338,14 +344,14 @@ async fn facet_search_dont_support_words() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["facetHits"].as_array().unwrap().len(), 0);
}
#[actix_rt::test]
async fn simple_facet_search_with_sort_by_count() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
@ -356,7 +362,7 @@ async fn simple_facet_search_with_sort_by_count() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["facetHits"].as_array().unwrap();
assert_eq!(hits.len(), 2);
assert_eq!(hits[0], json!({ "value": "Action", "count": 3 }));
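
For reference, the setting driving this ordering (a sketch mirroring the call earlier in the test): `sortFacetValuesBy` switches facet hits from the default alphabetical order to descending document count, either for all facets via `"*"` or per facet name.

```rust
// Sketch: order facet hits by document count for every facet.
let (response, code) =
    index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await;
```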
@ -365,25 +371,25 @@ async fn simple_facet_search_with_sort_by_count() {
#[actix_rt::test]
async fn add_documents_and_deactivate_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@ -396,8 +402,8 @@ async fn add_documents_and_deactivate_facet_search() {
#[actix_rt::test]
async fn deactivate_facet_search_and_add_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -405,16 +411,16 @@ async fn deactivate_facet_search_and_add_documents() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@ -427,8 +433,8 @@ async fn deactivate_facet_search_and_add_documents() {
#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -436,31 +442,31 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, code) = index
.update_settings(json!({
"facetSearch": true,
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
}
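
Taken together, these tests pin down the toggle semantics (the sketch below is illustrative): while `facetSearch` is `false` the facet-search endpoint answers 400, and setting it back to `true`, or resetting it to `null` to restore the default, makes facet queries answer 200 again.

```rust
// Sketch: disabling facet search at the index level.
let (response, code) = index.update_settings(json!({ "facetSearch": false })).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await;
// Any facet_search request now fails with
// "The facet search is disabled for this index".
```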
#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -468,25 +474,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, code) = index
.update_settings(json!({
"facetSearch": serde_json::Value::Null,
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
@ -612,8 +618,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
#[actix_rt::test]
async fn distinct_facet_search_on_movies() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{
@ -919,26 +925,26 @@ async fn distinct_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;
// The non-exhaustive facet count reports 27 documents for the facet query "blob", but only 23 of them have a distinct color.
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###);
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await;
// The exhaustive facet count reports 23 documents for the facet query "blob", which is the number of distinct colors.
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###);
}
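
Restating the two comments above as a sketch (asserting no behavior beyond what the test shows): the default facet count is an estimate that ignores the `color` distinct attribute, while `exhaustiveFacetCount: true` makes the count respect it.

```rust
// Sketch: same facet query, approximate vs exhaustive count.
let (approx, _) = index
    .facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": ""}))
    .await;
// approx["facetHits"] == [{"value": "Blob", "count": 27}]
let (exact, _) = index
    .facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true}))
    .await;
// exact["facetHits"] == [{"value": "Blob", "count": 23}]
```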

View File

@ -4,14 +4,23 @@ use tempfile::TempDir;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::common::{
default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
DOCUMENTS, NESTED_DOCUMENTS,
default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS,
};
use crate::json;
#[actix_rt::test]
async fn search_with_filter_string_notation() {
let index = shared_index_with_documents().await;
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index
.search(
@ -19,34 +28,44 @@ async fn search_with_filter_string_notation() {
"filter": "title = Gläss"
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
},
)
.await;
let nested_index = shared_index_with_nested_documents().await;
let index = server.index("nested");
nested_index
let (_, code) =
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");
let documents = NESTED_DOCUMENTS.clone();
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index
.search(
json!({
"filter": "cattos = pésti"
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["hits"][0]["id"], json!(852));
},
)
.await;
nested_index
index
.search(
json!({
"filter": "doggos.age > 5"
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
assert_eq!(response["hits"][0]["id"], json!(654));
assert_eq!(response["hits"][1]["id"], json!(951));
@ -63,7 +82,7 @@ async fn search_with_filter_array_notation() {
"filter": ["title = Gläss"]
}))
.await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
let (response, code) = index
@ -71,7 +90,7 @@ async fn search_with_filter_array_notation() {
"filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]]
}))
.await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
}
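
The array notation above is worth spelling out (the sketch assumes standard Meilisearch filter semantics): elements of the outer `filter` array are combined with AND, while strings inside a nested inner array are combined with OR, which is why the second request matches all three titles.

```rust
// Sketch: inner array = OR between conditions, outer array = AND.
let (response, code) = index
    .search_post(json!({
        "filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]],
    }))
    .await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
```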
@ -97,7 +116,7 @@ async fn search_with_contains_filter() {
"filter": "title CONTAINS cap"
}))
.await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
}
@ -250,14 +269,16 @@ async fn search_with_pattern_filter_settings() {
#[actix_rt::test]
async fn search_with_pattern_filter_settings_scenario_1() {
let server = Server::new_shared();
let temp = TempDir::new().unwrap();
let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap();
eprintln!("Documents -> Settings -> test");
let index = server.unique_index();
let index = server.index("test");
let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
let (task, code) = index
.update_settings(json!({"filterableAttributes": [{
@ -268,8 +289,9 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
// Check if the Equality filter works
index
@ -313,7 +335,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -333,8 +355,9 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
// Check if the Equality filter works
index
@ -444,8 +467,9 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
// Check if the Equality filter returns an error
index
@ -457,7 +481,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -543,8 +567,9 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
// Check if the Equality filter works
index
@ -588,7 +613,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -695,7 +720,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -721,7 +746,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"


@ -1,13 +1,55 @@
use meili_snap::{json_string, snapshot};
use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME;
use once_cell::sync::Lazy;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::common::shared_index_with_geo_documents;
use crate::common::{Server, Value};
use crate::json;
static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"id": 1,
"name": "Taco Truck",
"address": "444 Salsa Street, Burritoville",
"type": "Mexican",
"rating": 9,
"_geo": {
"lat": 34.0522,
"lng": -118.2437
}
},
{
"id": 2,
"name": "La Bella Italia",
"address": "456 Elm Street, Townsville",
"type": "Italian",
"rating": 9,
"_geo": {
"lat": "45.4777599",
"lng": "9.1967508"
}
},
{
"id": 3,
"name": "Crêpe Truck",
"address": "2 Billig Avenue, Rouenville",
"type": "French",
"rating": 10
}
])
});
#[actix_rt::test]
async fn geo_sort_with_geo_strings() {
let index = shared_index_with_geo_documents().await;
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -16,7 +58,7 @@ async fn geo_sort_with_geo_strings() {
"sort": ["_geoPoint(0.0, 0.0):asc"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
},
)
.await;
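
A note on the prerequisites exercised here (the sketch mirrors the hunk above): `_geo` must be in `sortableAttributes` for `_geoPoint(lat, lng):asc` sorts, and in `filterableAttributes` for geo filters such as `_geoBoundingBox`; the fixture also shows that documents may carry `lat`/`lng` as numbers or as strings.

```rust
// Sketch: declare `_geo` sortable, then sort by distance to a point.
let (task, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
index.wait_task(task.uid()).await.succeeded();
index
    .search(json!({"sort": ["_geoPoint(0.0, 0.0):asc"]}), |response, code| {
        assert_eq!(code, 200, "{response}");
    })
    .await;
```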
@ -24,7 +66,14 @@ async fn geo_sort_with_geo_strings() {
#[actix_rt::test]
async fn geo_bounding_box_with_string_and_number() {
let index = shared_index_with_geo_documents().await;
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.add_documents(documents, None).await;
index.wait_task(ret.uid()).await.succeeded();
index
.search(
@ -32,7 +81,7 @@ async fn geo_bounding_box_with_string_and_number() {
"filter": "_geoBoundingBox([89, 179], [-89, -179])",
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -74,7 +123,14 @@ async fn geo_bounding_box_with_string_and_number() {
#[actix_rt::test]
async fn bug_4640() {
// https://github.com/meilisearch/meilisearch/issues/4640
let index = shared_index_with_geo_documents().await;
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.update_settings_filterable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
index.wait_task(ret.uid()).await.succeeded();
// Sort the documents so that the second one comes back first
index
@ -83,7 +139,7 @@ async fn bug_4640() {
"sort": ["_geoPoint(45.4777599, 9.1967508):asc"],
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -146,7 +202,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "jean"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -191,7 +247,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "bob"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -228,7 +284,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "intel"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -268,7 +324,7 @@ async fn geo_sort_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "sort"], "sortableAttributes": [RESERVED_GEO_FIELD_NAME]}),
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [


@ -2,31 +2,31 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;
use crate::common::index::Index;
use crate::common::{Server, Shared, Value};
use crate::common::{Server, Value};
use crate::json;
async fn index_with_documents_user_provided<'a>(
server: &'a Server<Shared>,
server: &'a Server,
documents: &Value,
) -> Index<'a> {
let index = server.unique_index();
let index = server.index("test");
let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
"source": "userProvided",
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
index
}
async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
let index = server.unique_index();
async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
@ -36,11 +36,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Val
"documentTemplate": "{{doc.title}}, {{doc.desc}}"
}}} ))
.await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
index
}
@ -76,48 +76,6 @@ static SINGLE_DOCUMENT_VEC: Lazy<Value> = Lazy::new(|| {
}])
});
static TEST_DISTINCT_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
// for query "Captain Marvel" and vector [1.0, 1.0]
json!([
{
"id": 0,
"search": "Captain Planet",
"desc": "#2 for keyword search, #3 for hybrid search",
"_vectors": {
"default": [-1.0, 0.0],
},
"distinct": 0
},
{
"id": 1,
"search": "Captain Marvel",
"desc": "#1 for keyword search, #4 for hybrid search",
"_vectors": {
"default": [-1.0, -1.0],
},
"distinct": 1
},
{
"id": 2,
"search": "Some Captain at least",
"desc": "#3 for keyword search, #1 for hybrid search",
"_vectors": {
"default": [1.0, 1.0],
},
"distinct": 0
},
{
"id": 3,
"search": "Irrelevant Capitaine",
"desc": "#4 for keyword search, #2 for hybrid search",
"_vectors": {
"default": [1.0, 0.0],
},
"distinct": 1
},
])
});
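
Because this removed fixture encodes its expected ranks in the `desc` fields, a small standalone check (hypothetical, assuming the embedder's default cosine/angular distance) shows where the hybrid ordering comes from: ranking the `_vectors.default` embeddings by similarity to the query vector `[1.0, 1.0]` reproduces the annotated #1..#4 order.

```rust
// Hypothetical standalone sketch: rank the fixture's vectors against the
// query vector by cosine similarity.
fn cosine(a: [f32; 2], b: [f32; 2]) -> f32 {
    let dot = a[0] * b[0] + a[1] * b[1];
    let norm = |v: [f32; 2]| (v[0] * v[0] + v[1] * v[1]).sqrt();
    dot / (norm(a) * norm(b))
}

fn main() {
    let query = [1.0, 1.0];
    for (id, v) in [(0, [-1.0, 0.0]), (1, [-1.0, -1.0]), (2, [1.0, 1.0]), (3, [1.0, 0.0])] {
        // Prints 2 (1.000) > 3 (0.707) > 0 (-0.707) > 1 (-1.000),
        // matching the "#1..#4 for hybrid search" annotations.
        println!("id {id}: {:.3}", cosine(v, query));
    }
}
```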
static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@ -139,8 +97,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -172,8 +130,8 @@ async fn simple_search() {
#[actix_rt::test]
async fn limit_offset() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -185,8 +143,8 @@ async fn limit_offset() {
snapshot!(response["semanticHitCount"], @"0");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -201,8 +159,8 @@ async fn limit_offset() {
#[actix_rt::test]
async fn simple_search_hf() {
let server = Server::new_shared();
let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (response, code) = index
.search_post(
@ -253,8 +211,8 @@ async fn simple_search_hf() {
#[actix_rt::test]
async fn distribution_shift() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true});
let (response, code) = index.search_post(search.clone()).await;
@ -275,7 +233,7 @@ async fn distribution_shift() {
.await;
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await;
snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#);
let (response, code) = index.search_post(search).await;
@ -285,8 +243,8 @@ async fn distribution_shift() {
#[actix_rt::test]
async fn highlighter() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
@ -340,8 +298,8 @@ async fn highlighter() {
#[actix_rt::test]
async fn invalid_semantic_ratio() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -412,8 +370,8 @@ async fn invalid_semantic_ratio() {
#[actix_rt::test]
async fn single_document() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SINGLE_DOCUMENT_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SINGLE_DOCUMENT_VEC).await;
let (response, code) = index
.search_post(
@ -428,8 +386,8 @@ async fn single_document() {
#[actix_rt::test]
async fn query_combination() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
// searching with neither a query nor a vector, but with hybrid enabled, still performs a placeholder search
let (response, code) = index
@ -535,54 +493,10 @@ async fn query_combination() {
snapshot!(response["semanticHitCount"], @"0");
}
// see <https://github.com/meilisearch/meilisearch/issues/5526>
#[actix_rt::test]
async fn distinct_is_applied() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &TEST_DISTINCT_DOCUMENTS).await;
let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
// pure keyword
let (response, code) = index
.search_post(
json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###);
snapshot!(response["semanticHitCount"], @"null");
snapshot!(response["estimatedTotalHits"], @"2");
// pure semantic
let (response, code) = index
.search_post(
json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###);
snapshot!(response["semanticHitCount"], @"2");
snapshot!(response["estimatedTotalHits"], @"2");
// hybrid
let (response, code) = index
.search_post(
json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###);
snapshot!(response["semanticHitCount"], @"1");
snapshot!(response["estimatedTotalHits"], @"2");
}
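
The removed test is a compact illustration of `semanticRatio` (the sketch below restates it, assuming only what the snapshots show): `0.0` degenerates to pure keyword search with `semanticHitCount: null`, `1.0` to pure semantic search, and intermediate values merge both rankings while `distinctAttribute` is applied to the fused result.

```rust
// Sketch: the ratio is the only knob changing across the three requests.
for ratio in [0.0, 0.5, 1.0] {
    let (response, code) = index
        .search_post(json!({
            "q": "Captain Marvel",
            "vector": [1.0, 1.0],
            "hybrid": {"semanticRatio": ratio, "embedder": "default"},
        }))
        .await;
    assert_eq!(code, 200, "{response}");
}
```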
#[actix_rt::test]
async fn retrieve_vectors() {
let server = Server::new_shared();
let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (response, code) = index
.search_post(
@ -632,7 +546,7 @@ async fn retrieve_vectors() {
let (response, code) = index
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
@ -682,7 +596,7 @@ async fn retrieve_vectors() {
// remove `_vectors` from displayed attributes
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index


@ -89,9 +89,9 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(
@ -147,20 +147,23 @@ async fn simple_search() {
.search(
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"hits": [
{
"id": 852
},
{
"id": 853
}
],
"query": "進撃",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 2
}
"#);
"###);
snapshot!(code, @"200 OK");
},
)
@ -169,20 +172,23 @@ async fn simple_search() {
// chinese
index
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(response, @r#"
snapshot!(response, @r###"
{
"hits": [
{
"id": 853
},
{
"id": 852
}
],
"query": "进击",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1
"estimatedTotalHits": 2
}
"#);
"###);
snapshot!(code, @"200 OK");
})
.await;
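
For context (the sketch mirrors the two requests above): CJK queries are ambiguous between Japanese and Chinese, so without a hint the tokenizer guesses, and a per-request `locales` array pins the language explicitly.

```rust
// Sketch: the same ambiguous query, auto-detected vs pinned to Japanese.
index
    .search(json!({"q": "進撃", "attributesToRetrieve": ["id"]}), |_response, code| {
        assert_eq!(code, 200);
    })
    .await;
index
    .search(
        json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
        |_response, code| {
            assert_eq!(code, 200);
        },
    )
    .await;
```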
@ -190,9 +196,9 @@ async fn simple_search() {
#[actix_rt::test]
async fn force_locales() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -205,10 +211,10 @@ async fn force_locales() {
}),
)
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -268,9 +274,9 @@ async fn force_locales() {
#[actix_rt::test]
async fn force_locales_with_pattern() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -283,10 +289,10 @@ async fn force_locales_with_pattern() {
}),
)
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -346,9 +352,9 @@ async fn force_locales_with_pattern() {
#[actix_rt::test]
async fn force_locales_with_pattern_nested() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@ -359,10 +365,10 @@ async fn force_locales_with_pattern_nested() {
]
}))
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -417,9 +423,9 @@ async fn force_locales_with_pattern_nested() {
}
#[actix_rt::test]
async fn force_different_locales_with_pattern() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -434,10 +440,10 @@ async fn force_different_locales_with_pattern() {
}),
)
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -493,9 +499,9 @@ async fn force_different_locales_with_pattern() {
#[actix_rt::test]
async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -512,10 +518,10 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}),
)
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -571,9 +577,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
#[actix_rt::test]
async fn auto_infer_locales_at_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -586,10 +592,10 @@ async fn auto_infer_locales_at_search() {
}),
)
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -670,9 +676,9 @@ async fn auto_infer_locales_at_search() {
#[actix_rt::test]
async fn force_different_locales_with_pattern_nested() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@ -685,10 +691,10 @@ async fn force_different_locales_with_pattern_nested() {
]
}))
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -768,9 +774,9 @@ async fn force_different_locales_with_pattern_nested() {
#[actix_rt::test]
async fn settings_change() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
@ -783,10 +789,10 @@ async fn settings_change() {
]
}))
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 1,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -846,10 +852,10 @@ async fn settings_change() {
]
}))
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 2,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -900,9 +906,9 @@ async fn settings_change() {
#[actix_rt::test]
async fn invalid_locales() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(
@ -939,9 +945,9 @@ async fn invalid_locales() {
#[actix_rt::test]
async fn invalid_localized_attributes_rules() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, _) = index
.update_settings(json!({
"localizedAttributes": [
@ -1009,19 +1015,19 @@ async fn invalid_localized_attributes_rules() {
#[actix_rt::test]
async fn simple_facet_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
"filterableAttributes": ["name_en", "name_ja", "name_zh"],
}))
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -1067,9 +1073,9 @@ async fn simple_facet_search() {
#[actix_rt::test]
async fn facet_search_with_localized_attributes() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@ -1080,10 +1086,10 @@ async fn facet_search_with_localized_attributes() {
]
}))
.await;
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
snapshot!(response, @r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 0,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -1140,9 +1146,9 @@ async fn facet_search_with_localized_attributes() {
#[actix_rt::test]
async fn swedish_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{"id": "tra1-1", "product": "trä"},
{"id": "tra2-1", "product": "traktor"},
@ -1263,9 +1269,9 @@ async fn swedish_search() {
#[actix_rt::test]
async fn german_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{"id": 1, "product": "Interkulturalität"},
{"id": 2, "product": "Wissensorganisation"},

View File

@ -2,11 +2,11 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;
use crate::common::index::Index;
use crate::common::{Server, Shared, Value};
use crate::common::{Server, Value};
use crate::json;
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
let index = server.unique_index();
async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
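
The change that recurs throughout this file — `Server::new_shared()` plus `unique_index()` on one side, `Server::new().await` plus `index("test")` on the other — trades isolation for speed. A side-by-side sketch of the two setups as used in these tests:

```rust
// Fresh process per test: complete isolation, but every test pays the
// full startup cost.
let server = Server::new().await;
let index = server.index("test");

// One process shared across the suite: `unique_index()` hands each
// test a distinct index uid so concurrent tests cannot collide.
let server = Server::new_shared();
let index = server.unique_index();
```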
@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@ -75,8 +75,8 @@ async fn simple_search() {
#[actix_rt::test]
async fn search_with_typo() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@ -102,8 +102,8 @@ async fn search_with_typo() {
#[actix_rt::test]
async fn search_with_unknown_word() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {

View File

@ -1,4 +1,4 @@
// This module contains all the tests concerning search. Each particular feature of the search
// This modules contains all the tests concerning search. Each particular feature of the search
// should be tested in its own module to isolate tests and keep the tests readable.
mod distinct;
@ -17,11 +17,12 @@ mod restrict_searchable;
mod search_queue;
use meili_snap::{json_string, snapshot};
use meilisearch::Opt;
use tempfile::TempDir;
use crate::common::{
shared_index_with_documents, shared_index_with_nested_documents,
shared_index_with_score_documents, Server, Value, DOCUMENTS, FRUITS_DOCUMENTS,
NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS,
default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
Value, DOCUMENTS, FRUITS_DOCUMENTS, NESTED_DOCUMENTS, SCORE_DOCUMENTS, VECTOR_DOCUMENTS,
};
use crate::json;
@ -31,33 +32,46 @@ async fn test_settings_documents_indexing_swapping_and_search(
query: &Value,
test: impl Fn(Value, actix_http::StatusCode) + std::panic::UnwindSafe + Clone,
) {
let server = Server::new_shared();
let temp = TempDir::new().unwrap();
let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap();
eprintln!("Documents -> Settings -> test");
let index = server.unique_index();
let index = server.index("test");
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.search(query.clone(), test.clone()).await;
let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
eprintln!("Settings -> Documents -> test");
let index = server.unique_index();
let index = server.index("test");
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.search(query.clone(), test.clone()).await;
let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
}
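
For reference, a hedged usage sketch of this helper, assuming the argument order (documents, settings, query, assertion) implied by its body: the same data and settings are applied in both orders, so the closure must hold whether settings land before or after indexing.

```rust
test_settings_documents_indexing_swapping_and_search(
    &NESTED_DOCUMENTS,
    &json!({ "searchableAttributes": ["title"] }),
    &json!({ "q": "document" }),
    |response, code| {
        // must pass for both Documents -> Settings and Settings -> Documents
        assert_eq!(code, 200, "{response}");
    },
)
.await;
```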
#[actix_rt::test]
@ -65,7 +79,7 @@ async fn simple_placeholder_search() {
let index = shared_index_with_documents().await;
index
.search(json!({}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
})
.await;
@ -73,7 +87,7 @@ async fn simple_placeholder_search() {
let index = shared_index_with_nested_documents().await;
index
.search(json!({}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 4);
})
.await;
@ -84,7 +98,7 @@ async fn simple_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"q": "glass"}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
})
.await;
@ -92,7 +106,7 @@ async fn simple_search() {
let index = shared_index_with_nested_documents().await;
index
.search(json!({"q": "pésti"}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
})
.await;
@ -101,8 +115,8 @@ async fn simple_search() {
/// See <https://github.com/meilisearch/meilisearch/issues/5547>
#[actix_rt::test]
async fn bug_5547() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("big_fst");
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
@ -121,22 +135,22 @@ async fn bug_5547() {
#[actix_rt::test]
async fn search_with_stop_word() {
// related to https://github.com/meilisearch/meilisearch/issues/4984
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index
.update_settings(json!({"stopWords": ["the", "The", "a", "an", "to", "in", "of"]}))
.await;
snapshot!(code, @"202 Accepted");
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
let (task, _code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.add_documents(documents, None).await;
index.wait_task(1).await;
// prefix search
index
.search(json!({"q": "to the", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @"[]");
})
.await;
@ -144,7 +158,7 @@ async fn search_with_stop_word() {
// non-prefix search
index
.search(json!({"q": "to the ", "attributesToHighlight": ["title"], "attributesToRetrieve": ["title"] }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -186,13 +200,13 @@ async fn search_with_stop_word() {
#[actix_rt::test]
async fn search_with_typo_settings() {
// related to https://github.com/meilisearch/meilisearch/issues/5240
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index
.update_settings(json!({"typoTolerance": { "disableOnAttributes": ["title", "id"]}}))
.await;
snapshot!(code, @"202 Accepted");
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@ -200,7 +214,7 @@ async fn search_with_typo_settings() {
index
.search(json!({"q": "287947" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -220,11 +234,11 @@ async fn search_with_typo_settings() {
#[actix_rt::test]
async fn phrase_search_with_stop_word() {
// related to https://github.com/meilisearch/meilisearch/issues/3521
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.update_settings(json!({"stopWords": ["the", "of"]})).await;
snapshot!(code, @"202 Accepted");
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@ -232,7 +246,7 @@ async fn phrase_search_with_stop_word() {
index
.search(json!({"q": "how \"to\" train \"the" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
})
.await;
@ -243,7 +257,7 @@ async fn negative_phrase_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"q": "-\"train your dragon\"" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["hits"].as_array().unwrap();
assert_eq!(hits.len(), 4);
assert_eq!(hits[0]["id"], "287947");
@ -259,7 +273,7 @@ async fn negative_word_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"q": "-escape" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["hits"].as_array().unwrap();
assert_eq!(hits.len(), 4);
assert_eq!(hits[0]["id"], "287947");
@ -272,7 +286,7 @@ async fn negative_word_search() {
// Everything that contains derivatives of escape but not escape: nothing
index
.search(json!({"q": "-escape escape" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["hits"].as_array().unwrap();
assert_eq!(hits.len(), 0);
})
@ -284,7 +298,7 @@ async fn non_negative_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"q": "- escape" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["hits"].as_array().unwrap();
assert_eq!(hits.len(), 1);
assert_eq!(hits[0]["id"], "522681");
@ -293,7 +307,7 @@ async fn non_negative_search() {
index
.search(json!({"q": "- \"train your dragon\"" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["hits"].as_array().unwrap();
assert_eq!(hits.len(), 1);
assert_eq!(hits[0]["id"], "166428");
@ -303,8 +317,8 @@ async fn non_negative_search() {
#[actix_rt::test]
async fn negative_special_cases_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@ -317,7 +331,7 @@ async fn negative_special_cases_search() {
// There is a synonym for escape -> glass but we don't want "escape", only the derivatives: glass
index
.search(json!({"q": "-escape escape" }), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let hits = response["hits"].as_array().unwrap();
assert_eq!(hits.len(), 1);
assert_eq!(hits[0]["id"], "450465");
@ -329,8 +343,8 @@ async fn negative_special_cases_search() {
#[cfg(not(feature = "chinese-pinyin"))]
#[actix_rt::test]
async fn test_kanji_language_detection() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{ "id": 0, "title": "The quick (\"brown\") fox can't jump 32.3 feet, right? Brr, it's 29.3°F!" },
@ -342,7 +356,7 @@ async fn test_kanji_language_detection() {
index
.search(json!({"q": "東京"}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
})
.await;
@ -351,8 +365,8 @@ async fn test_kanji_language_detection() {
#[cfg(feature = "default")]
#[actix_rt::test]
async fn test_thai_language() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
// We don't need documents; the issue is on the query side only.
let documents = json!([
@ -368,7 +382,7 @@ async fn test_thai_language() {
index
.search(json!({"q": "สบู"}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
})
.await;
}
@ -386,7 +400,7 @@ async fn search_multiple_params() {
"offset": 0,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
},
)
@ -403,7 +417,7 @@ async fn search_multiple_params() {
"offset": 0,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
},
)
@ -419,7 +433,7 @@ async fn search_with_sort_on_numbers() {
"sort": ["id:asc"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
},
)
@ -432,7 +446,7 @@ async fn search_with_sort_on_numbers() {
"sort": ["doggos.age:asc"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 4);
},
)
@ -448,7 +462,7 @@ async fn search_with_sort_on_strings() {
"sort": ["title:desc"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
},
)
@ -461,7 +475,7 @@ async fn search_with_sort_on_strings() {
"sort": ["doggos.name:asc"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 4);
},
)
@ -476,7 +490,7 @@ async fn search_with_multiple_sort() {
"sort": ["id:asc", "title:desc"]
}))
.await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
}
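
A note on the prerequisite these sort tests rely on: the shared fixtures already declare their sort fields. As a sketch, assuming the standard `sortableAttributes` setting, a fresh index would need:

```rust
// Attributes must be declared sortable before "sort" may reference
// them; otherwise the search request is rejected.
let (task, _code) = index
    .update_settings(json!({ "sortableAttributes": ["id", "title"] }))
    .await;
index.wait_task(task.uid()).await.succeeded();
```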
@ -489,7 +503,7 @@ async fn search_facet_distribution() {
"facets": ["title"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1);
assert!(dist.get("title").is_some());
@ -507,7 +521,7 @@ async fn search_facet_distribution() {
"facets": ["father"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1);
assert_eq!(
@ -530,9 +544,9 @@ async fn search_facet_distribution() {
"facets": ["doggos.name"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1, "{dist:?}");
assert_eq!(dist.len(), 1, "{:?}", dist);
assert_eq!(
dist["doggos.name"],
json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
@ -547,9 +561,9 @@ async fn search_facet_distribution() {
"facets": ["doggos"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 3, "{dist:?}");
assert_eq!(dist.len(), 3, "{:?}", dist);
assert_eq!(
dist["doggos.name"],
json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
@ -565,7 +579,7 @@ async fn search_facet_distribution() {
"facets": ["doggos.name"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1);
assert_eq!(
@ -579,8 +593,8 @@ async fn search_facet_distribution() {
#[actix_rt::test]
async fn displayed_attributes() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({ "displayedAttributes": ["title"] })).await;
@ -590,14 +604,14 @@ async fn displayed_attributes() {
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert!(response["hits"][0].get("title").is_some());
}
#[actix_rt::test]
async fn placeholder_search_is_hard_limited() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
@ -609,7 +623,7 @@ async fn placeholder_search_is_hard_limited() {
"limit": 1500,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
},
)
@ -622,7 +636,7 @@ async fn placeholder_search_is_hard_limited() {
"limit": 400,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 200);
},
)
@ -638,7 +652,7 @@ async fn placeholder_search_is_hard_limited() {
"limit": 1500,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
},
)
@ -651,7 +665,7 @@ async fn placeholder_search_is_hard_limited() {
"limit": 400,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 200);
},
)
@ -660,8 +674,8 @@ async fn placeholder_search_is_hard_limited() {
#[actix_rt::test]
async fn search_is_hard_limited() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
let (task, _status_code) = index.add_documents(documents.into(), None).await;
@ -674,7 +688,7 @@ async fn search_is_hard_limited() {
"limit": 1500,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
},
)
@ -688,7 +702,7 @@ async fn search_is_hard_limited() {
"limit": 400,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 200);
},
)
@ -705,7 +719,7 @@ async fn search_is_hard_limited() {
"limit": 1500,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
},
)
@ -719,7 +733,7 @@ async fn search_is_hard_limited() {
"limit": 400,
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 200);
},
)
@ -728,8 +742,8 @@ async fn search_is_hard_limited() {
#[actix_rt::test]
async fn faceting_max_values_per_facet() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({ "filterableAttributes": ["number"] })).await;
@ -743,7 +757,7 @@ async fn faceting_max_values_per_facet() {
"facets": ["number"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let numbers = response["facetDistribution"]["number"].as_object().unwrap();
assert_eq!(numbers.len(), 100);
},
@ -760,7 +774,7 @@ async fn faceting_max_values_per_facet() {
"facets": ["number"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let numbers = &response["facetDistribution"]["number"].as_object().unwrap();
assert_eq!(numbers.len(), 10_000);
},
@ -770,7 +784,13 @@ async fn faceting_max_values_per_facet() {
#[actix_rt::test]
async fn test_score_details() {
let index = shared_index_with_documents().await;
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
let res = index.add_documents(json!(documents), None).await;
index.wait_task(res.0.uid()).await.succeeded();
index
.search(
@ -779,8 +799,8 @@ async fn test_score_details() {
"showRankingScoreDetails": true,
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
@ -830,7 +850,13 @@ async fn test_score_details() {
#[actix_rt::test]
async fn test_score() {
let index = shared_index_with_score_documents().await;
let server = Server::new().await;
let index = server.index("test");
let documents = SCORE_DOCUMENTS.clone();
let res = index.add_documents(json!(documents), None).await;
index.wait_task(res.0.uid()).await.succeeded();
index
.search(
@ -839,8 +865,8 @@ async fn test_score() {
"showRankingScore": true,
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "Batman the dark knight returns: Part 1",
@ -877,7 +903,13 @@ async fn test_score() {
#[actix_rt::test]
async fn test_score_threshold() {
let query = "Badman dark returns 1";
let index = shared_index_with_score_documents().await;
let server = Server::new().await;
let index = server.index("test");
let documents = SCORE_DOCUMENTS.clone();
let res = index.add_documents(json!(documents), None).await;
index.wait_task(res.0.uid()).await.succeeded();
index
.search(
@ -887,9 +919,9 @@ async fn test_score_threshold() {
"rankingScoreThreshold": 0.0
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["estimatedTotalHits"]), @"5");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"5");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "Batman the dark knight returns: Part 1",
@ -930,9 +962,9 @@ async fn test_score_threshold() {
"rankingScoreThreshold": 0.2
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["estimatedTotalHits"]), @r###"3"###);
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"3"###);
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "Batman the dark knight returns: Part 1",
@ -963,9 +995,9 @@ async fn test_score_threshold() {
"rankingScoreThreshold": 0.5
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["estimatedTotalHits"]), @r###"2"###);
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"2"###);
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "Batman the dark knight returns: Part 1",
@ -991,9 +1023,9 @@ async fn test_score_threshold() {
"rankingScoreThreshold": 0.8
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["estimatedTotalHits"]), @r###"1"###);
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"1"###);
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "Batman the dark knight returns: Part 1",
@ -1014,10 +1046,10 @@ async fn test_score_threshold() {
"rankingScoreThreshold": 1.0
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["estimatedTotalHits"]), @r###"0"###);
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @r###"0"###);
// nobody is perfect
snapshot!(json_string!(response["hits"]), @"[]");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @"[]");
},
)
.await;
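
Across the five thresholds above (0.0, 0.2, 0.5, 0.8, 1.0 keeping 5, 3, 2, 1, 0 hits) the filter is monotonic: raising the threshold can only shrink the result set. A sketch pairing it with `showRankingScore` to make the cut-off observable (the `_rankingScore` field is assumed to be returned when that flag is set):

```rust
index
    .search(
        json!({
            "q": "Badman dark returns 1",
            "showRankingScore": true,
            "rankingScoreThreshold": 0.5
        }),
        |response, code| {
            assert_eq!(code, 200, "{response}");
            // every surviving hit scores at or above the threshold
            for hit in response["hits"].as_array().unwrap() {
                assert!(hit["_rankingScore"].as_f64().unwrap() >= 0.5);
            }
        },
    )
    .await;
```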
@ -1025,8 +1057,8 @@ async fn test_score_threshold() {
#[actix_rt::test]
async fn test_degraded_score_details() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
@ -1043,8 +1075,8 @@ async fn test_degraded_score_details() {
"showRankingScoreDetails": true,
}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@ -1114,8 +1146,8 @@ async fn test_degraded_score_details() {
#[cfg(feature = "default")]
#[actix_rt::test]
async fn camelcased_words() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
// related to https://github.com/meilisearch/meilisearch/issues/3818
let documents = json!([
@ -1130,8 +1162,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "deLonghi"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 0,
@ -1148,8 +1180,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "dellonghi"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 0,
@ -1166,8 +1198,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "testa"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1188,8 +1220,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "testab"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1210,8 +1242,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "TestaB"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1232,8 +1264,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "Testab"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1254,8 +1286,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "TestAb"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1277,8 +1309,8 @@ async fn camelcased_words() {
// with Typos
index
.search(json!({"q": "dellonghi"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 0,
@ -1295,8 +1327,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "TetsAB"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1317,8 +1349,8 @@ async fn camelcased_words() {
index
.search(json!({"q": "TetsAB"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 2,
@ -1340,13 +1372,13 @@ async fn camelcased_words() {
#[actix_rt::test]
async fn simple_search_with_strange_synonyms() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) =
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
let r = index.wait_task(task.uid()).await.succeeded();
snapshot!(r["status"], @r###""succeeded""###);
let r = index.wait_task(task.uid()).await;
meili_snap::snapshot!(r["status"], @r###""succeeded""###);
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@ -1354,8 +1386,8 @@ async fn simple_search_with_strange_synonyms() {
index
.search(json!({"q": "How to train"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
@ -1372,8 +1404,8 @@ async fn simple_search_with_strange_synonyms() {
index
.search(json!({"q": "How & train"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
@ -1390,8 +1422,8 @@ async fn simple_search_with_strange_synonyms() {
index
.search(json!({"q": "to"}), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"title": "How to Train Your Dragon: The Hidden World",
@ -1409,8 +1441,8 @@ async fn simple_search_with_strange_synonyms() {
#[actix_rt::test]
async fn change_attributes_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({ "searchableAttributes": ["father", "mother"] })).await;
@ -1430,8 +1462,8 @@ async fn change_attributes_settings() {
"attributesToRetrieve": ["id", "doggos"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response["hits"]), @r###"
assert_eq!(code, 200, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 852,
@ -1461,8 +1493,8 @@ async fn change_attributes_settings() {
"attributesToRetrieve": ["id", "doggos"]
}),
|response, code| {
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response["hits"]), @r###"
assert_eq!(code, 200, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###"
[
{
"id": 852,
@ -1531,7 +1563,7 @@ async fn test_nested_fields() {
&settings,
&json!({"q": "document"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1577,7 +1609,7 @@ async fn test_nested_fields() {
&settings,
&json!({"q": "zeroth"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1595,7 +1627,7 @@ async fn test_nested_fields() {
&settings,
&json!({"q": "first"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1618,7 +1650,7 @@ async fn test_nested_fields() {
&settings,
&json!({"q": "field"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1654,7 +1686,7 @@ async fn test_nested_fields() {
&settings,
&json!({"q": "array"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
// nested is not searchable
snapshot!(json_string!(response["hits"]), @"[]");
},
@ -1666,7 +1698,7 @@ async fn test_nested_fields() {
&settings,
&json!({"q": "lied"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
// nested is not searchable
snapshot!(json_string!(response["hits"]), @"[]");
},
@ -1679,7 +1711,7 @@ async fn test_nested_fields() {
&settings,
&json!({"filter": "nested.object = field"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1715,7 +1747,7 @@ async fn test_nested_fields() {
&settings,
&json!({"filter": "nested.machin = bidule"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1738,10 +1770,10 @@ async fn test_nested_fields() {
&settings,
&json!({"filter": "nested = array"}),
|response, code| {
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array",
"message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = array",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -1757,10 +1789,10 @@ async fn test_nested_fields() {
&settings,
&json!({"filter": r#"nested = "I lied""#}),
|response, code| {
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
snapshot!(json_string!(response), @r###"
{
"message": "Index `[uuid]`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"",
"message": "Index `test`: Attribute `nested` is not filterable. Available filterable attribute patterns are: `nested.machin`, `nested.object`, `title`.\n1:7 nested = \"I lied\"",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -1818,7 +1850,7 @@ async fn test_typo_settings() {
}),
&json!({"q": "document"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1870,7 +1902,7 @@ async fn test_typo_settings() {
}),
&json!({"q": "docume"}),
|response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["hits"]), @r###"
[
{
@ -1914,8 +1946,8 @@ async fn test_typo_settings() {
/// Modifying facets with different casing should work correctly
#[actix_rt::test]
async fn change_facet_casing() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -1923,7 +1955,7 @@ async fn change_facet_casing() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, _code) = index
.add_documents(
@ -1936,7 +1968,7 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
let (response, _code) = index
.add_documents(
@ -1949,12 +1981,12 @@ async fn change_facet_casing() {
None,
)
.await;
index.wait_task(response.uid()).await.succeeded();
index.wait_task(response.uid()).await;
index
.search(json!({ "facets": ["dog"] }), |response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["facetDistribution"]), @r###"
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["facetDistribution"]), @r###"
{
"dog": {
"bouvier bernois": 1

File diff suppressed because it is too large

View File

@ -2296,7 +2296,6 @@ async fn error_remote_500_once() {
}
#[actix_rt::test]
#[ignore]
async fn error_remote_timeout() {
let ms0 = Server::new().await;
let ms1 = Server::new().await;

View File

@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() {
let index = shared_index_with_documents().await;
index
.search(json!({}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert!(response.get("estimatedTotalHits").is_some());
assert!(response.get("limit").is_some());
assert!(response.get("offset").is_some());
@ -25,7 +25,7 @@ async fn simple_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"page": 1}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
assert!(response.get("totalHits").is_some());
assert_eq!(response["page"], 1);
@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() {
let index = shared_index_with_documents().await;
index
.search(json!({"page": 0}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
assert!(response.get("totalHits").is_some());
assert_eq!(response["page"], 0);
@ -58,7 +58,7 @@ async fn hits_per_page_1() {
let index = shared_index_with_documents().await;
index
.search(json!({"hitsPerPage": 1}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["totalHits"], 5);
assert_eq!(response["page"], 1);
@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() {
let index = shared_index_with_documents().await;
index
.search(json!({"hitsPerPage": 0}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
assert_eq!(response["totalHits"], 5);
assert_eq!(response["page"], 1);
@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() {
for page in 0..=4 {
index
.search(json!({"page": page, "hitsPerPage": 1}), |response, code| {
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["totalHits"], 4);
assert_eq!(response["totalPages"], 4);
})

View File

@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;
use crate::common::index::Index;
use crate::common::{Server, Shared, Value};
use crate::common::{Server, Value};
use crate::json;
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
let index = server.unique_index();
async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search_on_title() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search should return 2 documents (ids: 2 and 3).
index
@ -51,8 +51,8 @@ async fn simple_search_on_title() {
#[actix_rt::test]
async fn search_no_searchable_attribute_set() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(
@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() {
#[actix_rt::test]
async fn search_on_all_attributes() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@ -106,8 +106,8 @@ async fn search_on_all_attributes() {
#[actix_rt::test]
async fn search_on_all_attributes_restricted_set() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() {
#[actix_rt::test]
async fn simple_prefix_search_on_title() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search should return 2 documents (ids: 2 and 3).
index
@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() {
#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search with matching strategy "all" should only return 1 document (id: 2).
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| {
@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() {
#[actix_rt::test]
async fn simple_search_on_no_field() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search on no field shouldn't return any document.
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| {
@ -161,8 +161,8 @@ async fn simple_search_on_no_field() {
#[actix_rt::test]
async fn word_ranking_rule_order() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
// Document 3 should appear before document 2.
index
@ -189,8 +189,8 @@ async fn word_ranking_rule_order() {
#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() {
#[actix_rt::test]
async fn typo_ranking_rule_order() {
let server = Server::new_shared();
let server = Server::new().await;
let index = index_with_documents(
server,
&server,
&json!([
{
"title": "Capitain Marivel",
@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() {
#[actix_rt::test]
async fn attributes_ranking_rule_order() {
let server = Server::new_shared();
let server = Server::new().await;
let index = index_with_documents(
server,
&server,
&json!([
{
"title": "Captain Marvel",
@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() {
#[actix_rt::test]
async fn exactness_ranking_rule_order() {
let server = Server::new_shared();
let server = Server::new().await;
let index = index_with_documents(
server,
&server,
&json!([
{
"title": "Captain Marvel",
@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() {
#[actix_rt::test]
async fn search_on_exact_field() {
let server = Server::new_shared();
let server = Server::new().await;
let index = index_with_documents(
server,
&server,
&json!([
{
"title": "Captain Marvel",
@ -359,7 +359,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{response:?}");
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
@ -372,7 +372,7 @@ async fn search_on_exact_field() {
#[actix_rt::test]
async fn phrase_search_on_title() {
let server = Server::new_shared();
let server = Server::new().await;
let documents = json!([
{ "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" },
{ "id": 5, "desc": "Document Review", "title": "Document Review Attorney" },
@ -383,7 +383,7 @@ async fn phrase_search_on_title() {
{ "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" },
{ "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" }
]);
let index = index_with_documents(server, &documents).await;
let index = index_with_documents(&server, &documents).await;
index
.search(
@ -416,381 +416,3 @@ async fn phrase_search_on_title() {
)
.await;
}
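
Before the block of nested-search tests removed by the hunk above, a hedged sketch of the phrase semantics this test exercises against the titles just defined — double quotes force the exact word sequence:

```rust
index
    .search(
        json!({"q": "\"Document Review\"", "attributesToSearchOn": ["title"], "attributesToRetrieve": ["title"]}),
        |response, code| {
            assert_eq!(code, 200, "{response}");
            // only titles containing the exact sequence "Document Review" match
            assert!(!response["hits"].as_array().unwrap().is_empty());
        },
    )
    .await;
```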
static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"details": {
"title": "Shazam!",
"desc": "a Captain Marvel ersatz",
"weaknesses": ["magic", "requires transformation"],
"outfit": {
"has_cape": true,
"colors": {
"primary": "red",
"secondary": "gold"
}
}
},
"id": "1",
},
{
"details": {
"title": "Captain Planet",
"desc": "He's not part of the Marvel Cinematic Universe",
"blue_skin": true,
"outfit": {
"has_cape": false
}
},
"id": "2",
},
{
"details": {
"title": "Captain Marvel",
"desc": "a Shazam ersatz",
"weaknesses": ["magic", "power instability"],
"outfit": {
"has_cape": false
}
},
"id": "3",
}])
});
#[actix_rt::test]
async fn nested_search_on_title_with_prefix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Wildcard should match the nested 'details.title' attribute
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "2"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_search_with_suffix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Wildcard should match any attribute inside 'details.'
// It's worth noting the difference between 'details.*' and '*.title'
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "1"
},
{
"id": "2"
}
]"###);
},
)
.await;
// Should return 1 document (id: 1)
index
.search(
json!({"q": "gold", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "1"
}
]"###);
},
)
.await;
// Should return 2 documents (ids: 1 and 2)
index
.search(
json!({"q": "true", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "1"
},
{
"id": "2"
}
]"###);
},
)
.await;
}
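
To make the `details.*` versus `*.title` distinction concrete, a sketch against the same fixture: `*.desc` expands only to paths whose last segment is `desc`, and two of the three documents mention "ersatz" there:

```rust
index
    .search(
        json!({"q": "ersatz", "attributesToSearchOn": ["*.desc"], "attributesToRetrieve": ["id"]}),
        |response, code| {
            assert_eq!(code, 200, "{response}");
            // documents "1" and "3" both carry "ersatz" in details.desc
            assert_eq!(response["hits"].as_array().unwrap().len(), 2);
        },
    )
    .await;
```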
#[actix_rt::test]
async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
let (task, _status_code) =
index.update_settings_searchable_attributes(json!(["details.title"])).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "2"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(response["hits"].as_array().unwrap().len(), @"0");
},
)
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(response["hits"].as_array().unwrap().len(), @"0");
},
)
.await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown.*", "*.unknown", "*.title"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "2"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_prefix_search_on_title_with_prefix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3).
index
.search(
json!({"q": "Captain Mar", "attributesToSearchOn": ["*.title"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "2"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_prefix_search_on_details_with_suffix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
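// Prefix search for 'Captain Mar' across all 'details.*' attributes should
// return all three documents.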
index
.search(
json!({"q": "Captain Mar", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "1"
},
{
"id": "2"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3)
index
.search(
json!({"q": "mag", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "1"
},
{
"id": "3"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_search_on_title_matching_strategy_all() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// A nested search with matchingStrategy "all" should return only 1 document (id: 3)
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.title"], "matchingStrategy": "all", "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
}
]"###);
},
)
.await;
}
#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Document 3 should appear before documents 1 and 2
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["*.desc", "*.title"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "1"
},
{
"id": "2"
}
]
"###
);
},
)
.await;
}
#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() {
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Document 3 should appear before documents 1 and 2
index
.search(
json!({"q": "Captain Marvel", "attributesToSearchOn": ["details.*"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(code, @"200 OK");
snapshot!(json_string!(response["hits"]),
@r###"
[
{
"id": "3"
},
{
"id": "1"
},
{
"id": "2"
}
]
"###
);
},
)
.await;
}

View File

@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"indexUid": "tamo",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,

View File

@ -3,8 +3,8 @@ use crate::json;
#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() {
#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();

View File

@ -11,62 +11,59 @@ macro_rules! test_setting_routes {
#[actix_rt::test]
async fn get_unexisting_index() {
let server = Server::new_shared();
let index_name = uuid::Uuid::new_v4().to_string();
let url = format!("/indexes/{index_name}/settings/{}",
let server = Server::new().await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.get(url).await;
assert_eq!(code, 404, "{response}");
let (_response, code) = server.service.get(url).await;
assert_eq!(code, 404);
}
#[actix_rt::test]
async fn update_unexisting_index() {
let server = Server::new_shared();
let index_name = uuid::Uuid::new_v4().to_string();
let url = format!("/indexes/{index_name}/settings/{}",
let server = Server::new().await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
assert_eq!(code, 202, "{response}");
let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
assert_eq!(code, 404, "{response}");
assert_eq!(code, 202, "{}", response);
server.index("").wait_task(0).await;
let (response, code) = server.index("test").get().await;
assert_eq!(code, 200, "{}", response);
}
#[actix_rt::test]
async fn delete_unexisting_index() {
let server = Server::new_shared();
let index_name = uuid::Uuid::new_v4().to_string();
let url = format!("/indexes/{index_name}/settings/{}",
let server = Server::new().await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.delete(url).await;
assert_eq!(code, 202, "{response}");
let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
assert_eq!(code, 404, "{response}");
let (_, code) = server.service.delete(url).await;
assert_eq!(code, 202);
let response = server.index("").wait_task(0).await;
assert_eq!(response["status"], "failed");
}
#[actix_rt::test]
async fn get_default() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
let url = format!("/indexes/{}/settings/{}",
index.uid,
assert_eq!(code, 202, "{}", response);
index.wait_task(0).await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.get(url).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
let expected = crate::json!($default_value);
assert_eq!(expected, response);
}
@ -198,16 +195,15 @@ test_setting_routes!(
#[actix_rt::test]
async fn get_settings_unexisting_index() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index.settings().await;
assert_eq!(code, 404, "{response}")
let server = Server::new().await;
let (response, code) = server.index("test").settings().await;
assert_eq!(code, 404, "{}", response)
}
#[actix_rt::test]
async fn get_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
@ -251,8 +247,9 @@ async fn get_settings() {
#[actix_rt::test]
async fn secrets_are_hidden_in_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
@ -272,11 +269,11 @@ async fn secrets_are_hidden_in_settings() {
.await;
meili_snap::snapshot!(code, @"202 Accepted");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"taskUid": 1,
"indexUid": "test",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -285,7 +282,7 @@ async fn secrets_are_hidden_in_settings() {
let settings_update_uid = response.uid();
index.wait_task(settings_update_uid).await.succeeded();
index.wait_task(settings_update_uid).await;
let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@ -373,16 +370,16 @@ async fn secrets_are_hidden_in_settings() {
#[actix_rt::test]
async fn error_update_settings_unknown_field() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.update_settings(json!({"foo": 12})).await;
assert_eq!(code, 400);
}
#[actix_rt::test]
async fn test_partial_update() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
@ -401,18 +398,20 @@ async fn test_partial_update() {
#[actix_rt::test]
async fn error_delete_settings_unexisting_index() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
}
#[actix_rt::test]
async fn reset_all_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let documents = json!([
{
@ -424,6 +423,7 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 0);
index.wait_task(response.uid()).await.succeeded();
let (update_task,_status_code) = index
@ -456,15 +456,17 @@ async fn reset_all_settings() {
#[actix_rt::test]
async fn update_setting_unexisting_index() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
index.wait_task(task.uid()).await.succeeded();
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
#[actix_rt::test]
@ -485,8 +487,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
#[actix_rt::test]
async fn error_set_invalid_ranking_rules() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
@ -503,8 +505,8 @@ async fn error_set_invalid_ranking_rules() {
#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
@ -524,8 +526,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
#[actix_rt::test]
async fn granular_filterable_attributes() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let (response, code) =
@ -543,7 +545,7 @@ async fn granular_filterable_attributes() {
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
snapshot!(json_string!(response["filterableAttributes"]), @r###"
[
{

View File

@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {
#[actix_rt::test]
async fn attribute_scale_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -38,7 +38,7 @@ async fn attribute_scale_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
// the expected order is [1, 3, 2] instead of [3, 1, 2]
@ -99,8 +99,8 @@ async fn attribute_scale_search() {
#[actix_rt::test]
async fn attribute_scale_phrase_search() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -167,8 +167,8 @@ async fn attribute_scale_phrase_search() {
#[actix_rt::test]
async fn word_scale_set_and_reset() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -282,8 +282,8 @@ async fn word_scale_set_and_reset() {
#[actix_rt::test]
async fn attribute_scale_default_ranking_rules() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() {
"proximityPrecision": "byAttribute"
}))
.await;
assert_eq!("202", code.as_str(), "{response:?}");
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
// the expected order is [3, 1, 2]

View File

@ -5,8 +5,8 @@ use crate::json;
#[actix_rt::test]
async fn set_and_reset() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _code) = index
.update_settings(json!({
@ -70,8 +70,8 @@ async fn set_and_search() {
},
]);
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
@ -224,8 +224,8 @@ async fn advanced_synergies() {
},
]);
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();

View File

@ -6,11 +6,11 @@ use crate::json;
#[actix_rt::test]
async fn similar_unexisting_index() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -26,12 +26,12 @@ async fn similar_unexisting_index() {
#[actix_rt::test]
async fn similar_unexisting_parameter() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
index
.similar(json!({"id": 287947, "marin": "hello"}), |response, code| {
assert_eq!(code, 400, "{response}");
assert_eq!(code, 400, "{}", response);
assert_eq!(response["code"], "bad_request");
})
.await;
@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() {
#[actix_rt::test]
async fn similar_bad_id() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -53,7 +53,7 @@ async fn similar_bad_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request");
@ -69,8 +69,8 @@ async fn similar_bad_id() {
#[actix_rt::test]
async fn similar_bad_ranking_score_threshold() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() {
#[actix_rt::test]
async fn similar_invalid_ranking_score_threshold() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await;
snapshot!(code, @"400 Bad Request");
@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() {
#[actix_rt::test]
async fn similar_invalid_id() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -143,7 +143,7 @@ async fn similar_invalid_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) =
index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await;
@ -160,8 +160,8 @@ async fn similar_invalid_id() {
#[actix_rt::test]
async fn similar_not_found_id() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -174,7 +174,7 @@ async fn similar_not_found_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) =
index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await;
@ -191,8 +191,8 @@ async fn similar_not_found_id() {
#[actix_rt::test]
async fn similar_bad_offset() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -205,7 +205,7 @@ async fn similar_bad_offset() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) =
index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await;
@ -233,8 +233,8 @@ async fn similar_bad_offset() {
#[actix_rt::test]
async fn similar_bad_limit() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -247,7 +247,7 @@ async fn similar_bad_limit() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let (response, code) =
index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await;
@ -277,8 +277,8 @@ async fn similar_bad_limit() {
async fn similar_bad_filter() {
// Since a filter is deserialized as a json Value it will never fail to deserialize.
// Thus the error message is not generated by deserr but written by us.
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -291,7 +291,7 @@ async fn similar_bad_filter() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
snapshot!(code, @"202 Accepted");
@ -316,8 +316,8 @@ async fn similar_bad_filter() {
#[actix_rt::test]
async fn filter_invalid_syntax_object() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() {
#[actix_rt::test]
async fn filter_invalid_syntax_array() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() {
#[actix_rt::test]
async fn filter_invalid_syntax_string() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() {
#[actix_rt::test]
async fn filter_invalid_attribute_array() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() {
#[actix_rt::test]
async fn filter_invalid_attribute_string() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() {
#[actix_rt::test]
async fn filter_reserved_geo_attribute_array() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() {
#[actix_rt::test]
async fn filter_reserved_geo_attribute_string() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() {
#[actix_rt::test]
async fn filter_reserved_attribute_array() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() {
#[actix_rt::test]
async fn filter_reserved_attribute_string() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() {
#[actix_rt::test]
async fn filter_reserved_geo_point_array() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() {
#[actix_rt::test]
async fn filter_reserved_geo_point_string() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() {
#[actix_rt::test]
async fn similar_bad_retrieve_vectors() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) =
index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await;
@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() {
#[actix_rt::test]
async fn similar_bad_embedder() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -820,7 +820,7 @@ async fn similar_bad_embedder() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;

View File

@ -47,8 +47,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn basic() {
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -61,12 +61,12 @@ async fn basic() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
server.wait_task(value.uid()).await.succeeded();
index.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -233,8 +233,8 @@ async fn basic() {
#[actix_rt::test]
async fn ranking_score_threshold() {
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -247,12 +247,12 @@ async fn ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
server.wait_task(value.uid()).await.succeeded();
index.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -503,8 +503,8 @@ async fn ranking_score_threshold() {
#[actix_rt::test]
async fn filter() {
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -517,12 +517,12 @@ async fn filter() {
"filterableAttributes": ["title", "release_year"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
server.wait_task(value.uid()).await.succeeded();
index.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -621,8 +621,8 @@ async fn filter() {
#[actix_rt::test]
async fn limit_and_offset() {
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index
.update_settings(json!({
@ -635,12 +635,12 @@ async fn limit_and_offset() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await;
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
server.wait_task(value.uid()).await.succeeded();
index.wait_task(value.uid()).await.succeeded();
index
.similar(

View File

@ -6,8 +6,8 @@ use crate::common::Server;
use crate::json;
#[actix_rt::test]
async fn get_version() {
let server = Server::new_shared();
async fn get_settings_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.version().await;
assert_eq!(code, 200);
let version = response.as_object().unwrap();
@ -18,7 +18,7 @@ async fn get_version() {
#[actix_rt::test]
async fn test_healthyness() {
let server = Server::new_shared();
let server = Server::new().await;
let (response, status_code) = server.service.get("/health").await;
assert_eq!(status_code, 200);
@ -55,7 +55,7 @@ async fn stats() {
]);
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202, "{response}");
assert_eq!(code, 202, "{}", response);
assert_eq!(response["taskUid"], 1);
index.wait_task(response.uid()).await.succeeded();
@ -78,8 +78,8 @@ async fn stats() {
#[actix_rt::test]
async fn add_remove_embeddings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
@ -216,8 +216,8 @@ async fn add_remove_embeddings() {
#[actix_rt::test]
async fn add_remove_embedded_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
@ -293,8 +293,8 @@ async fn add_remove_embedded_documents() {
#[actix_rt::test]
async fn update_embedder_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
// 2 embedded documents for 3 embeddings in total
// but no embedders have been added to the settings yet, so we expect 0 embedded documents and 0 embeddings in total

View File

@ -1,7 +1,8 @@
mod errors;
mod webhook;
use meili_snap::{json_string, snapshot};
use meili_snap::insta::assert_json_snapshot;
use meili_snap::snapshot;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
@ -10,12 +11,14 @@ use crate::json;
#[actix_rt::test]
async fn error_get_unexisting_task_status() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index.get_task(u32::MAX as u64).await;
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(1).await;
let expected_response = json!({
"message": "Task `4294967295` not found.",
"message": "Task `1` not found.",
"code": "task_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#task_not_found"
@ -27,8 +30,8 @@ async fn error_get_unexisting_task_status() {
#[actix_rt::test]
async fn get_task_status() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (create_task, _status_code) = index.create(None).await;
let (add_task, _status_code) = index
.add_documents(
@ -39,7 +42,7 @@ async fn get_task_status() {
None,
)
.await;
server.wait_task(create_task.uid()).await.succeeded();
index.wait_task(create_task.uid()).await.succeeded();
let (_response, code) = index.get_task(add_task.uid()).await;
assert_eq!(code, 200);
// TODO check response format, as per #48
@ -47,11 +50,10 @@ async fn get_task_status() {
#[actix_rt::test]
async fn list_tasks() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -62,7 +64,6 @@ async fn list_tasks() {
#[actix_rt::test]
async fn list_tasks_pagination_and_reverse() {
// do not use a shared server here, as we want to assert tasks ids and we need them to be stable
let server = Server::new().await;
// First of all we want to create a lot of tasks very quickly. The fastest way is to create a lot of empty indexes
let mut last_task = None;
@ -70,7 +71,7 @@ async fn list_tasks_pagination_and_reverse() {
let index = server.index(format!("test-{i}"));
last_task = Some(index.create(None).await.0.uid());
}
server.wait_task(last_task.unwrap()).await.succeeded();
server.wait_task(last_task.unwrap()).await;
let (response, code) = server.tasks_filter("limit=3").await;
assert_eq!(code, 200);
@ -102,14 +103,13 @@ async fn list_tasks_pagination_and_reverse() {
#[actix_rt::test]
async fn list_tasks_with_star_filters() {
let server = Server::new().await;
// Do not use a unique index here, as we want to test the `indexUids=*` filter.
let index = server.index("test");
let (task, _code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await;
let (response, code) = index.service.get("/tasks?indexUids=test").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
@ -127,102 +127,93 @@ async fn list_tasks_with_star_filters() {
let (response, code) =
index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await;
assert_eq!(code, 200, "{response:?}");
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get(format!(
"/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}",
index.uid
))
.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test")
.await;
assert_eq!(code, 200, "{response:?}");
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*")
.await;
assert_eq!(code, 200, "{response:?}");
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_status_filtered() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_type_filtered() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) =
index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_invalid_canceled_by_filter() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = index
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
assert_eq!(code, 200, "{response}");
let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 0);
}
#[actix_rt::test]
async fn list_tasks_status_and_type_filtered() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 0);
let (response, code) = index
@ -232,12 +223,12 @@ async fn list_tasks_status_and_type_filtered() {
&[],
)
.await;
assert_eq!(code, 200, "{response}");
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
macro_rules! assert_valid_summarized_task {
($response:expr, $task_type:literal, $index:tt) => {{
($response:expr, $task_type:literal, $index:literal) => {{
assert_eq!($response.as_object().unwrap().len(), 5);
assert!($response["taskUid"].as_u64().is_some());
assert_eq!($response["indexUid"], $index);
@ -251,49 +242,49 @@ macro_rules! assert_valid_summarized_task {
#[actix_web::test]
async fn test_summarized_task_view() {
let server = Server::new_shared();
let index = server.unique_index();
let index_uid = index.uid.clone();
let server = Server::new().await;
let index = server.index("test");
let (response, _) = index.create(None).await;
assert_valid_summarized_task!(response, "indexCreation", index_uid);
assert_valid_summarized_task!(response, "indexCreation", "test");
let (response, _) = index.update(None).await;
assert_valid_summarized_task!(response, "indexUpdate", index_uid);
assert_valid_summarized_task!(response, "indexUpdate", "test");
let (response, _) = index.update_settings(json!({})).await;
assert_valid_summarized_task!(response, "settingsUpdate", index_uid);
assert_valid_summarized_task!(response, "settingsUpdate", "test");
let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
let (response, _) = index.delete_document(1).await;
assert_valid_summarized_task!(response, "documentDeletion", index_uid);
assert_valid_summarized_task!(response, "documentDeletion", "test");
let (response, _) = index.clear_all_documents().await;
assert_valid_summarized_task!(response, "documentDeletion", index_uid);
assert_valid_summarized_task!(response, "documentDeletion", "test");
let (response, _) = index.delete().await;
assert_valid_summarized_task!(response, "indexDeletion", index_uid);
assert_valid_summarized_task!(response, "indexDeletion", "test");
}
#[actix_web::test]
async fn test_summarized_document_addition_or_update() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@ -311,14 +302,15 @@ async fn test_summarized_document_addition_or_update() {
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@ -337,22 +329,18 @@ async fn test_summarized_document_addition_or_update() {
#[actix_web::test]
async fn test_summarized_delete_documents_by_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let non_existing_task_id1 = u32::MAX as u64;
let non_existing_task_id2 = non_existing_task_id1 - 1;
let non_existing_task_id3 = non_existing_task_id1 - 2;
let (task, _status_code) = index
.delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3])
.await;
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -362,7 +350,7 @@ async fn test_summarized_delete_documents_by_batch() {
"originalFilter": null
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -376,14 +364,15 @@ async fn test_summarized_delete_documents_by_batch() {
index.create(None).await;
let (del_task, _status_code) = index.delete_batch(vec![42]).await;
server.wait_task(del_task.uid()).await.succeeded();
index.wait_task(del_task.uid()).await.succeeded();
let (task, _) = index.get_task(del_task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -403,19 +392,20 @@ async fn test_summarized_delete_documents_by_batch() {
#[actix_web::test]
async fn test_summarized_delete_documents_by_filter() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -425,7 +415,7 @@ async fn test_summarized_delete_documents_by_filter() {
"originalFilter": "\"doggo = bernese\""
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -440,14 +430,15 @@ async fn test_summarized_delete_documents_by_filter() {
index.create(None).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -457,7 +448,7 @@ async fn test_summarized_delete_documents_by_filter() {
"originalFilter": "\"doggo = bernese\""
},
"error": {
"message": "Index `[uuid]`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
"message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
@ -472,14 +463,15 @@ async fn test_summarized_delete_documents_by_filter() {
index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 4,
"batchUid": 4,
"indexUid": "test",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -499,17 +491,18 @@ async fn test_summarized_delete_documents_by_filter() {
#[actix_web::test]
async fn test_summarized_delete_document_by_id() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_document(1).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -519,7 +512,7 @@ async fn test_summarized_delete_document_by_id() {
"originalFilter": null
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -533,14 +526,15 @@ async fn test_summarized_delete_document_by_id() {
index.create(None).await;
let (task, _status_code) = index.delete_document(42).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -560,12 +554,12 @@ async fn test_summarized_delete_document_by_id() {
#[actix_web::test]
async fn test_summarized_settings_update() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
// here we should find my payload even in the failed task.
let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
"code": "invalid_settings_ranking_rules",
@ -575,14 +569,15 @@ async fn test_summarized_settings_update() {
"###);
let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,
@ -610,17 +605,18 @@ async fn test_summarized_settings_update() {
#[actix_web::test]
async fn test_summarized_index_creation() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "succeeded",
"type": "indexCreation",
"canceledBy": null,
@ -636,14 +632,15 @@ async fn test_summarized_index_creation() {
"###);
let (task, _status_code) = index.create(Some("doggos")).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "failed",
"type": "indexCreation",
"canceledBy": null,
@ -651,7 +648,7 @@ async fn test_summarized_index_creation() {
"primaryKey": "doggos"
},
"error": {
"message": "Index `[uuid]` already exists.",
"message": "Index `test` already exists.",
"code": "index_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_already_exists"
@ -666,16 +663,16 @@ async fn test_summarized_index_creation() {
#[actix_web::test]
async fn test_summarized_index_deletion() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
let (ret, _code) = index.delete().await;
let task = server.wait_task(ret.uid()).await;
let task = index.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"indexUid": "test",
"status": "failed",
"type": "indexDeletion",
"canceledBy": null,
@ -683,7 +680,7 @@ async fn test_summarized_index_deletion() {
"deletedDocuments": 0
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -700,13 +697,13 @@ async fn test_summarized_index_deletion() {
// both tasks may get autobatched and the deleted documents count will be wrong.
let (ret, _code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
let task = server.wait_task(ret.uid()).await;
let task = index.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@ -723,13 +720,13 @@ async fn test_summarized_index_deletion() {
"###);
let (ret, _code) = index.delete().await;
let task = server.wait_task(ret.uid()).await;
let task = index.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"indexUid": "test",
"status": "succeeded",
"type": "indexDeletion",
"canceledBy": null,
@ -746,13 +743,13 @@ async fn test_summarized_index_deletion() {
    // What happens when you delete an index that doesn't exist.
let (ret, _code) = index.delete().await;
let task = server.wait_task(ret.uid()).await;
let task = index.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"indexUid": "test",
"status": "failed",
"type": "indexDeletion",
"canceledBy": null,
@ -760,7 +757,7 @@ async fn test_summarized_index_deletion() {
"deletedDocuments": 0
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -775,18 +772,19 @@ async fn test_summarized_index_deletion() {
#[actix_web::test]
async fn test_summarized_index_update() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("test");
// If the index doesn't exist yet, we should get errors with or without the primary key.
let (task, _status_code) = index.update(None).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
@ -794,7 +792,7 @@ async fn test_summarized_index_update() {
"primaryKey": null
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -807,14 +805,15 @@ async fn test_summarized_index_update() {
"###);
let (task, _status_code) = index.update(Some("bones")).await;
server.wait_task(task.uid()).await.failed();
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
@ -822,7 +821,7 @@ async fn test_summarized_index_update() {
"primaryKey": "bones"
},
"error": {
"message": "Index `[uuid]` not found.",
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -838,14 +837,15 @@ async fn test_summarized_index_update() {
index.create(None).await;
let (task, _status_code) = index.update(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 3,
"batchUid": 3,
"indexUid": "test",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
@ -861,14 +861,15 @@ async fn test_summarized_index_update() {
"###);
let (task, _status_code) = index.update(Some("bones")).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"uid": 4,
"batchUid": 4,
"indexUid": "test",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
@ -886,7 +887,7 @@ async fn test_summarized_index_update() {
#[actix_web::test]
async fn test_summarized_index_swap() {
let server = Server::new_shared();
let server = Server::new().await;
let (task, _status_code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
@ -894,11 +895,12 @@ async fn test_summarized_index_swap() {
.await;
server.wait_task(task.uid()).await.failed();
let (task, _) = server.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"uid": 0,
"batchUid": 0,
"indexUid": null,
"status": "failed",
"type": "indexSwap",
@ -926,25 +928,23 @@ async fn test_summarized_index_swap() {
}
"###);
let doggos_index = server.unique_index();
let (task, _code) = doggos_index.create(None).await;
let (task, _code) = server.index("doggos").create(None).await;
server.wait_task(task.uid()).await.succeeded();
let cattos_index = server.unique_index();
let (task, _code) = cattos_index.create(None).await;
let (task, _code) = server.index("cattos").create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server
.index_swap(json!([
{ "indexes": [doggos_index.uid, cattos_index.uid] }
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = server.get_task(task.uid()).await;
snapshot!(json_string!(task,
{ ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"uid": 3,
"batchUid": 3,
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
@ -970,21 +970,20 @@ async fn test_summarized_index_swap() {
#[actix_web::test]
async fn test_summarized_task_cancelation() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggos");
    // to avoid being flaky, we're only going to cancel an already finished task :(
let (task, _status_code) = index.create(None).await;
let task_uid = task.uid();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.cancel_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(json_string!(task,
{ ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"uid": 1,
"batchUid": 1,
"indexUid": null,
"status": "succeeded",
"type": "taskCancelation",
@ -992,7 +991,7 @@ async fn test_summarized_task_cancelation() {
"details": {
"matchedTasks": 1,
"canceledTasks": 0,
"originalFilter": "[of]"
"originalFilter": "?uids=0"
},
"error": null,
"duration": "[duration]",
@ -1005,19 +1004,20 @@ async fn test_summarized_task_cancelation() {
#[actix_web::test]
async fn test_summarized_task_deletion() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggos");
    // to avoid being flaky, we're only going to delete an already finished task :(
let (task, _status_code) = index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.delete_tasks("uids=0").await;
server.wait_task(task.uid()).await.succeeded();
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"uid": 1,
"batchUid": 1,
"indexUid": null,
"status": "succeeded",
"type": "taskDeletion",
@ -1038,22 +1038,22 @@ async fn test_summarized_task_deletion() {
#[actix_web::test]
async fn test_summarized_dump_creation() {
// Do not use a shared server because it takes too long to create a dump
let server = Server::new().await;
let (task, _status_code) = server.create_dump().await;
server.wait_task(task.uid()).await;
let (task, _) = server.get_task(task.uid()).await;
snapshot!(task,
assert_json_snapshot!(task,
{ ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"uid": 0,
"batchUid": 0,
"indexUid": null,
"status": "succeeded",
"type": "dumpCreation",
"canceledBy": null,
"details": {
"dumpUid": "[dump_uid]"
"dumpUid": "[dumpUid]"
},
"error": null,
"duration": "[duration]",

View File

@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents;
#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() {
#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() {
#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() {
#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
let server = Server::new_shared();
let index = server.unique_index();
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() {
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#"
snapshot!(ret, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"indexUid": "doggo",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() {
}
},
"error": {
"message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"

View File

@ -50,7 +50,7 @@ impl AttributePatterns {
///
/// * `pattern` - The pattern to match against.
/// * `str` - The string to match against the pattern.
pub fn match_pattern(pattern: &str, str: &str) -> PatternMatch {
fn match_pattern(pattern: &str, str: &str) -> PatternMatch {
// If the pattern is a wildcard, return Match
if pattern == "*" {
return PatternMatch::Match;

View File

@ -1,13 +1,11 @@
use std::cmp::Ordering;
use heed::RoTxn;
use itertools::Itertools;
use roaring::RoaringBitmap;
use crate::score_details::{ScoreDetails, ScoreValue, ScoringStrategy};
use crate::search::new::{distinct_fid, distinct_single_docid};
use crate::search::SemanticSearch;
use crate::{Index, MatchingWords, Result, Search, SearchResult};
use crate::{MatchingWords, Result, Search, SearchResult};
struct ScoreWithRatioResult {
matching_words: MatchingWords,
@ -93,10 +91,7 @@ impl ScoreWithRatioResult {
keyword_results: Self,
from: usize,
length: usize,
distinct: Option<&str>,
index: &Index,
rtxn: &RoTxn<'_>,
) -> Result<(SearchResult, u32)> {
) -> (SearchResult, u32) {
#[derive(Clone, Copy)]
enum ResultSource {
Semantic,
@ -111,9 +106,8 @@ impl ScoreWithRatioResult {
vector_results.document_scores.len() + keyword_results.document_scores.len(),
);
let distinct_fid = distinct_fid(distinct, index, rtxn)?;
let mut excluded_documents = RoaringBitmap::new();
for res in vector_results
let mut documents_seen = RoaringBitmap::new();
for ((docid, (main_score, _sub_score)), source) in vector_results
.document_scores
.into_iter()
.zip(std::iter::repeat(ResultSource::Semantic))
@ -127,33 +121,13 @@ impl ScoreWithRatioResult {
compare_scores(left, right).is_ge()
},
)
// remove documents we already saw and apply distinct rule
.filter_map(|item @ ((docid, _), _)| {
if !excluded_documents.insert(docid) {
// the document was already added, or is indistinct from an already-added document.
return None;
}
if let Some(distinct_fid) = distinct_fid {
if let Err(error) = distinct_single_docid(
index,
rtxn,
distinct_fid,
docid,
&mut excluded_documents,
) {
return Some(Err(error));
}
}
Some(Ok(item))
})
// remove documents we already saw
.filter(|((docid, _), _)| documents_seen.insert(*docid))
// start skipping **after** the filter
.skip(from)
// take **after** skipping
.take(length)
{
let ((docid, (main_score, _sub_score)), source) = res?;
if let ResultSource::Semantic = source {
semantic_hit_count += 1;
}
@ -162,24 +136,10 @@ impl ScoreWithRatioResult {
document_scores.push(main_score);
}
// compute the set of candidates from both sets
let candidates = vector_results.candidates | keyword_results.candidates;
let must_remove_redundant_candidates = distinct_fid.is_some();
let candidates = if must_remove_redundant_candidates {
            // patch up the candidates to remove the indistinct documents, then add back the actual hits
let mut candidates = candidates - excluded_documents;
for docid in &documents_ids {
candidates.insert(*docid);
}
candidates
} else {
candidates
};
Ok((
(
SearchResult {
matching_words: keyword_results.matching_words,
candidates,
candidates: vector_results.candidates | keyword_results.candidates,
documents_ids,
document_scores,
degraded: vector_results.degraded | keyword_results.degraded,
@ -187,7 +147,7 @@ impl ScoreWithRatioResult {
| keyword_results.used_negative_operator,
},
semantic_hit_count,
))
)
}
}
@ -266,15 +226,8 @@ impl Search<'_> {
let keyword_results = ScoreWithRatioResult::new(keyword_results, 1.0 - semantic_ratio);
let vector_results = ScoreWithRatioResult::new(vector_results, semantic_ratio);
let (merge_results, semantic_hit_count) = ScoreWithRatioResult::merge(
vector_results,
keyword_results,
self.offset,
self.limit,
search.distinct.as_deref(),
search.index,
search.rtxn,
)?;
let (merge_results, semantic_hit_count) =
ScoreWithRatioResult::merge(vector_results, keyword_results, self.offset, self.limit);
assert!(merge_results.documents_ids.len() <= self.limit);
Ok((merge_results, Some(semantic_hit_count)))
}
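
The hunk above removes the distinct-aware filtering from the hybrid merge, reverting it to a plain score-ordered merge with per-docid deduplication. The core shape of that merge, as a self-contained sketch assuming the `itertools` and `roaring` crates:

```rust
use itertools::Itertools;
use roaring::RoaringBitmap;

/// Merge two score-sorted result lists, best score first, keeping each
/// docid once, then paginate. Stands in for `ScoreWithRatioResult::merge`.
fn merge_ranked(
    semantic: Vec<(u32, f32)>,
    keyword: Vec<(u32, f32)>,
    from: usize,
    length: usize,
) -> Vec<u32> {
    let mut seen = RoaringBitmap::new();
    semantic
        .into_iter()
        .merge_by(keyword, |left, right| left.1 >= right.1)
        // remove documents we already saw
        .filter(|(docid, _)| seen.insert(*docid))
        // start skipping **after** the filter, take **after** skipping
        .skip(from)
        .take(length)
        .map(|(docid, _)| docid)
        .collect()
}
```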

View File

@ -4,9 +4,7 @@ use super::logger::SearchLogger;
use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait};
use super::SearchContext;
use crate::score_details::{ScoreDetails, ScoringStrategy};
use crate::search::new::distinct::{
apply_distinct_rule, distinct_fid, distinct_single_docid, DistinctOutput,
};
use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput};
use crate::{Result, TimeBudget};
pub struct BucketSortOutput {
@ -37,7 +35,16 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
logger.ranking_rules(&ranking_rules);
logger.initial_universe(universe);
let distinct_fid = distinct_fid(distinct, ctx.index, ctx.txn)?;
let distinct_field = match distinct {
Some(distinct) => Some(distinct),
None => ctx.index.distinct_field(ctx.txn)?,
};
let distinct_fid = if let Some(field) = distinct_field {
ctx.index.fields_ids_map(ctx.txn)?.id(field)
} else {
None
};
if universe.len() < from as u64 {
return Ok(BucketSortOutput {

View File

@ -9,7 +9,7 @@ use crate::heed_codec::facet::{
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, FieldDocIdFacetCodec,
};
use crate::heed_codec::BytesRefCodec;
use crate::{FieldId, Index, Result, SearchContext};
use crate::{Index, Result, SearchContext};
pub struct DistinctOutput {
pub remaining: RoaringBitmap,
@ -121,18 +121,3 @@ pub fn facet_string_values<'a>(
fn facet_values_prefix_key(distinct: u16, id: u32) -> [u8; FID_SIZE + DOCID_SIZE] {
concat_arrays::concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes())
}
pub fn distinct_fid(
query_distinct_field: Option<&str>,
index: &Index,
rtxn: &RoTxn<'_>,
) -> Result<Option<FieldId>> {
let distinct_field = match query_distinct_field {
Some(distinct) => Some(distinct),
None => index.distinct_field(rtxn)?,
};
let distinct_fid =
if let Some(field) = distinct_field { index.fields_ids_map(rtxn)?.id(field) } else { None };
Ok(distinct_fid)
}
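
The removed `distinct_fid` helper above is small enough to restate in isolation: the query-level distinct attribute wins over the index default, and the surviving name is mapped to a field id. A dependency-free sketch with the fields-ids map stubbed as a `HashMap`:

```rust
use std::collections::HashMap;

fn distinct_fid(
    query_distinct: Option<&str>,
    index_default: Option<&str>,
    fields_ids_map: &HashMap<String, u16>,
) -> Option<u16> {
    // The distinct set on the query overrides the index-level setting.
    let field = query_distinct.or(index_default)?;
    fields_ids_map.get(field).copied()
}
```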

View File

@ -28,7 +28,6 @@ use std::time::Duration;
use bucket_sort::{bucket_sort, BucketSortOutput};
use charabia::{Language, TokenizerBuilder};
use db_cache::DatabaseCache;
pub use distinct::{distinct_fid, distinct_single_docid};
use exact_attribute::ExactAttribute;
use graph_based_ranking_rule::{Exactness, Fid, Position, Proximity, Typo};
use heed::RoTxn;
@ -52,7 +51,6 @@ pub use self::geo_sort::{Parameter as GeoSortParameter, Strategy as GeoSortStrat
use self::graph_based_ranking_rule::Words;
use self::interner::Interned;
use self::vector_sort::VectorSort;
use crate::attribute_patterns::{match_pattern, PatternMatch};
use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::index::PrefixSearch;
use crate::localized_attributes_rules::LocalizedFieldIds;
@ -121,37 +119,17 @@ impl<'ctx> SearchContext<'ctx> {
let searchable_fields_weights = self.index.searchable_fields_and_weights(self.txn)?;
let exact_attributes_ids = self.index.exact_attributes_ids(self.txn)?;
let mut universal_wildcard = false;
let mut wildcard = false;
let mut restricted_fids = RestrictedFids::default();
for field_name in attributes_to_search_on {
if field_name == "*" {
universal_wildcard = true;
wildcard = true;
                // we cannot early exit as we want to return an error in case of unknown fields
continue;
}
let searchable_weight =
searchable_fields_weights.iter().find(|(name, _, _)| name == field_name);
// The field is not searchable but may contain a wildcard pattern
if searchable_weight.is_none() && field_name.contains("*") {
let matching_searchable_weights: Vec<_> = searchable_fields_weights
.iter()
.filter(|(name, _, _)| match_pattern(field_name, name) == PatternMatch::Match)
.collect();
if !matching_searchable_weights.is_empty() {
for (_name, fid, weight) in matching_searchable_weights {
if exact_attributes_ids.contains(fid) {
restricted_fids.exact.push((*fid, *weight));
} else {
restricted_fids.tolerant.push((*fid, *weight));
}
}
continue;
}
}
let (fid, weight) = match searchable_weight {
                // The field id exists and the field is searchable
Some((_name, fid, weight)) => (*fid, *weight),
@ -181,7 +159,7 @@ impl<'ctx> SearchContext<'ctx> {
};
}
if universal_wildcard {
if wildcard {
self.restricted_fids = None;
} else {
self.restricted_fids = Some(restricted_fids);
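
The removed branch above expanded wildcard patterns in `attributesToSearchOn` against the list of searchable fields. A simplified sketch of that expansion step, using a hypothetical prefix-only `match_glob` in place of milli's `match_pattern`, which handles richer patterns:

```rust
/// Collect the searchable fields whose names match a wildcard pattern.
fn expand_pattern<'a>(
    pattern: &str,
    searchable: &'a [(String, u16, u16)], // (name, fid, weight)
) -> Vec<&'a (String, u16, u16)> {
    searchable.iter().filter(|(name, _, _)| match_glob(pattern, name)).collect()
}

/// Hypothetical, prefix-only stand-in for `match_pattern`.
fn match_glob(pattern: &str, name: &str) -> bool {
    pattern == "*"
        || pattern
            .strip_suffix('*')
            .map_or(pattern == name, |prefix| name.starts_with(prefix))
}
```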

View File

@ -202,11 +202,11 @@ pub fn number_of_typos_allowed<'ctx>(
Ok(Box::new(move |word: &str| {
if !authorize_typos
|| word.chars().count() < min_len_one_typo as usize
|| word.len() < min_len_one_typo as usize
|| exact_words.as_ref().is_some_and(|fst| fst.contains(word))
{
0
} else if word.chars().count() < min_len_two_typos as usize {
} else if word.len() < min_len_two_typos as usize {
1
} else {
2
@ -380,62 +380,4 @@ mod tests {
Ok(())
}
#[test]
fn test_unicode_typo_tolerance_fixed() -> Result<()> {
let temp_index = temp_index_with_documents();
let rtxn = temp_index.read_txn()?;
let ctx = SearchContext::new(&temp_index, &rtxn)?;
let nbr_typos = number_of_typos_allowed(&ctx)?;
// ASCII word "doggy" (5 chars, 5 bytes)
let ascii_word = "doggy";
let ascii_typos = nbr_typos(ascii_word);
// Cyrillic word "собак" (5 chars, 10 bytes)
let cyrillic_word = "собак";
let cyrillic_typos = nbr_typos(cyrillic_word);
// Both words have 5 characters, so they should have the same typo tolerance
assert_eq!(
ascii_typos, cyrillic_typos,
"Words with same character count should get same typo tolerance"
);
// With default settings (oneTypo=5, twoTypos=9), 5-char words should get 1 typo
assert_eq!(ascii_typos, 1, "5-character word should get 1 typo tolerance");
assert_eq!(cyrillic_typos, 1, "5-character word should get 1 typo tolerance");
Ok(())
}
#[test]
fn test_various_unicode_scripts() -> Result<()> {
let temp_index = temp_index_with_documents();
let rtxn = temp_index.read_txn()?;
let ctx = SearchContext::new(&temp_index, &rtxn)?;
let nbr_typos = number_of_typos_allowed(&ctx)?;
// Let's use 5-character words for consistent testing
let five_char_words = vec![
("doggy", "ASCII"), // 5 chars, 5 bytes
("café!", "Accented"), // 5 chars, 7 bytes
("собак", "Cyrillic"), // 5 chars, 10 bytes
];
let expected_typos = 1; // With default settings, 5-char words get 1 typo
for (word, script) in five_char_words {
let typos = nbr_typos(word);
assert_eq!(
typos, expected_typos,
"{} word '{}' should get {} typo(s)",
script, word, expected_typos
);
}
Ok(())
}
}
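
The `chars().count()` to `len()` change above swaps a character count for a byte count, and the two only agree for ASCII. The deleted tests spell out the failure mode; the arithmetic in isolation:

```rust
fn main() {
    let ascii = "doggy"; // 5 chars, 5 bytes
    let cyrillic = "собак"; // 5 chars, 10 bytes

    assert_eq!(ascii.chars().count(), ascii.len()); // ASCII: counts agree
    assert_eq!(cyrillic.chars().count(), 5);
    assert_eq!(cyrillic.len(), 10);
    // With oneTypo=5 and twoTypos=9, the byte length (10 >= 9) would
    // wrongly grant "собак" two typos instead of one.
}
```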

View File

@ -72,7 +72,7 @@ fn test_2gram_simple() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();
@ -103,7 +103,7 @@ fn test_3gram_simple() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();
@ -153,7 +153,7 @@ fn test_no_disable_ngrams() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();
@ -179,7 +179,7 @@ fn test_2gram_prefix() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();
@ -208,7 +208,7 @@ fn test_3gram_prefix() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();
@ -260,7 +260,7 @@ fn test_disable_split_words() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();

View File

@ -151,7 +151,7 @@ fn test_no_typo() {
let index = create_index();
index
.update_settings(|s| {
s.set_authorize_typos(false);
s.set_autorize_typos(false);
})
.unwrap();

View File

@ -19,7 +19,10 @@ use crate::update::{
};
use crate::vector::settings::{EmbedderSource, EmbeddingSettings};
use crate::vector::EmbeddingConfigs;
use crate::{db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult};
use crate::{
db_snap, obkv_to_json, Filter, FilterableAttributesRule, Index, Search, SearchResult,
ThreadPoolNoAbortBuilder,
};
pub(crate) struct TempIndex {
pub inner: Index,
@ -59,8 +62,15 @@ impl TempIndex {
wtxn: &mut RwTxn<'t>,
documents: Mmap,
) -> Result<(), crate::error::Error> {
let local_pool;
let indexer_config = &self.indexer_config;
let pool = &indexer_config.thread_pool;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
&local_pool
}
};
let rtxn = self.inner.read_txn()?;
let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?;
@ -143,8 +153,15 @@ impl TempIndex {
wtxn: &mut RwTxn<'t>,
external_document_ids: Vec<String>,
) -> Result<(), crate::error::Error> {
let local_pool;
let indexer_config = &self.indexer_config;
let pool = &indexer_config.thread_pool;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
&local_pool
}
};
let rtxn = self.inner.read_txn()?;
let db_fields_ids_map = self.inner.fields_ids_map(&rtxn)?;
@ -214,8 +231,15 @@ fn aborting_indexation() {
let mut wtxn = index.inner.write_txn().unwrap();
let should_abort = AtomicBool::new(false);
let local_pool;
let indexer_config = &index.indexer_config;
let pool = &indexer_config.thread_pool;
let pool = match &indexer_config.thread_pool {
Some(pool) => pool,
None => {
local_pool = ThreadPoolNoAbortBuilder::new().build().unwrap();
&local_pool
}
};
let rtxn = index.inner.read_txn().unwrap();
let db_fields_ids_map = index.inner.fields_ids_map(&rtxn).unwrap();
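
The repeated `let local_pool; … match` shape above is the standard trick for borrowing either a configured pool or a locally built fallback: the binding is declared before the `match` so the reference taken in the `None` arm outlives it. A minimal sketch with plain `rayon` standing in for `ThreadPoolNoAbort`:

```rust
use rayon::{ThreadPool, ThreadPoolBuilder};

fn run_indexing(configured: Option<&ThreadPool>) {
    // Declared before the match so the `None` arm's reference lives long enough.
    let local_pool;
    let pool = match configured {
        Some(pool) => pool,
        None => {
            local_pool = ThreadPoolBuilder::new()
                .thread_name(|i| format!("indexing-thread-{i}"))
                .build()
                .unwrap();
            &local_pool
        }
    };
    pool.install(|| {
        // indexing work runs on the chosen pool
    });
}
```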

View File

@ -54,10 +54,6 @@ impl ThreadPoolNoAbortBuilder {
ThreadPoolNoAbortBuilder::default()
}
pub fn new_for_indexing() -> ThreadPoolNoAbortBuilder {
ThreadPoolNoAbortBuilder::default().thread_name(|index| format!("indexing-thread:{index}"))
}
pub fn thread_name<F>(mut self, closure: F) -> Self
where
F: FnMut(usize) -> String + 'static,

View File

@ -33,6 +33,7 @@ use crate::documents::{obkv_to_object, DocumentsBatchReader};
use crate::error::{Error, InternalError};
use crate::index::{PrefixSearch, PrefixSettings};
use crate::progress::Progress;
use crate::thread_pool_no_abort::ThreadPoolNoAbortBuilder;
pub use crate::update::index_documents::helpers::CursorClonableMmap;
use crate::update::{
IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst,
@ -227,7 +228,24 @@ where
let possible_embedding_mistakes =
crate::vector::error::PossibleEmbeddingMistakes::new(&field_distribution);
let pool = &self.indexer_config.thread_pool;
let backup_pool;
let pool = match self.indexer_config.thread_pool {
Some(ref pool) => pool,
None => {
// We initialize a backup pool with the default
// settings if none have already been set.
#[allow(unused_mut)]
let mut pool_builder = ThreadPoolNoAbortBuilder::new();
#[cfg(test)]
{
pool_builder = pool_builder.num_threads(1);
}
backup_pool = pool_builder.build()?;
&backup_pool
}
};
// create LMDB writer channel
let (lmdb_writer_sx, lmdb_writer_rx): (

View File

@ -1,7 +1,7 @@
use grenad::CompressionType;
use super::GrenadParameters;
use crate::{thread_pool_no_abort::ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
use crate::thread_pool_no_abort::ThreadPoolNoAbort;
#[derive(Debug)]
pub struct IndexerConfig {
@ -9,10 +9,9 @@ pub struct IndexerConfig {
pub max_nb_chunks: Option<usize>,
pub documents_chunk_size: Option<usize>,
pub max_memory: Option<usize>,
pub max_threads: Option<usize>,
pub chunk_compression_type: CompressionType,
pub chunk_compression_level: Option<u32>,
pub thread_pool: ThreadPoolNoAbort,
pub thread_pool: Option<ThreadPoolNoAbort>,
pub max_positions_per_attributes: Option<u32>,
pub skip_index_budget: bool,
}
@ -28,39 +27,16 @@ impl IndexerConfig {
}
}
/// By default, use only 1 thread for indexing in tests
#[cfg(test)]
pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) {
let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
.num_threads(1)
.build()
.expect("failed to build default rayon thread pool");
(pool, Some(1))
}
#[cfg(not(test))]
pub fn default_thread_pool_and_threads() -> (ThreadPoolNoAbort, Option<usize>) {
let pool = ThreadPoolNoAbortBuilder::new_for_indexing()
.build()
.expect("failed to build default rayon thread pool");
(pool, None)
}
impl Default for IndexerConfig {
fn default() -> Self {
let (thread_pool, max_threads) = default_thread_pool_and_threads();
Self {
max_threads,
thread_pool,
log_every_n: None,
max_nb_chunks: None,
documents_chunk_size: None,
max_memory: None,
chunk_compression_type: CompressionType::None,
chunk_compression_level: None,
thread_pool: None,
max_positions_per_attributes: None,
skip_index_budget: false,
}

View File

@ -5,7 +5,7 @@ pub use self::concurrent_available_ids::ConcurrentAvailableIds;
pub use self::facet::bulk::FacetsUpdateBulk;
pub use self::facet::incremental::FacetsUpdateIncrementalInner;
pub use self::index_documents::*;
pub use self::indexer_config::{default_thread_pool_and_threads, IndexerConfig};
pub use self::indexer_config::IndexerConfig;
pub use self::new::ChannelCongestion;
pub use self::settings::{validate_embedding_settings, Setting, Settings};
pub use self::update_step::UpdateIndexingStep;

View File

@ -8,7 +8,7 @@ use hashbrown::HashMap;
use serde_json::Value;
use super::super::cache::BalancedCaches;
use super::facet_document::{extract_document_facets, extract_geo_document};
use super::facet_document::extract_document_facets;
use super::FacetKind;
use crate::fields_ids_map::metadata::Metadata;
use crate::filterable_attributes_rules::match_faceted_field;
@ -90,12 +90,17 @@ impl FacetedDocidsExtractor {
let mut cached_sorter = context.data.borrow_mut_or_yield();
let mut del_add_facet_value = DelAddFacetValue::new(&context.doc_alloc);
let docid = document_change.docid();
        // Using a macro avoids borrowing the parameters as mutable in both closures at
        // the same time by postponing their creation
macro_rules! facet_fn {
(del) => {
|fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| {
let res = match document_change {
DocumentChange::Deletion(inner) => extract_document_facets(
inner.current(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
is_geo_enabled,
&mut |fid, meta, depth, value| {
Self::facet_fn_with_options(
&context.doc_alloc,
cached_sorter.deref_mut(),
@ -109,52 +114,10 @@ impl FacetedDocidsExtractor {
depth,
value,
)
}
};
(add) => {
|fid: FieldId, meta: Metadata, depth: perm_json_p::Depth, value: &Value| {
Self::facet_fn_with_options(
&context.doc_alloc,
cached_sorter.deref_mut(),
BalancedCaches::insert_add_u32,
&mut del_add_facet_value,
DelAddFacetValue::insert_add,
docid,
fid,
meta,
filterable_attributes,
depth,
value,
)
}
};
}
match document_change {
DocumentChange::Deletion(inner) => {
let mut del = facet_fn!(del);
extract_document_facets(
inner.current(rtxn, index, context.db_fields_ids_map)?,
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
&mut del,
)?;
if is_geo_enabled {
extract_geo_document(
inner.current(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
&mut del,
)?;
}
}
},
),
DocumentChange::Update(inner) => {
let has_changed_for_facets = inner.has_changed_for_fields(
let has_changed = inner.has_changed_for_fields(
&mut |field_name| {
match_faceted_field(
field_name,
@ -168,74 +131,93 @@ impl FacetedDocidsExtractor {
index,
context.db_fields_ids_map,
)?;
// 1. Maybe update doc
if has_changed_for_facets {
extract_document_facets(
inner.current(rtxn, index, context.db_fields_ids_map)?,
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
&mut facet_fn!(del),
)?;
extract_document_facets(
inner.merged(rtxn, index, context.db_fields_ids_map)?,
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
&mut facet_fn!(add),
)?;
let has_changed_for_geo_fields =
inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?;
if !has_changed && !has_changed_for_geo_fields {
return Ok(());
}
// 2. Maybe update geo
if is_geo_enabled
&& inner.has_changed_for_geo_fields(rtxn, index, context.db_fields_ids_map)?
{
extract_geo_document(
inner.current(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
&mut facet_fn!(del),
)?;
extract_geo_document(
inner.merged(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
&mut facet_fn!(add),
)?;
}
}
DocumentChange::Insertion(inner) => {
let mut add = facet_fn!(add);
extract_document_facets(
inner.inserted(),
inner.current(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
&mut add,
is_geo_enabled,
&mut |fid, meta, depth, value| {
Self::facet_fn_with_options(
&context.doc_alloc,
cached_sorter.deref_mut(),
BalancedCaches::insert_del_u32,
&mut del_add_facet_value,
DelAddFacetValue::insert_del,
docid,
fid,
meta,
filterable_attributes,
depth,
value,
)
},
)?;
if is_geo_enabled {
extract_geo_document(
inner.inserted(),
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
&mut add,
)?;
}
extract_document_facets(
inner.merged(rtxn, index, context.db_fields_ids_map)?,
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
is_geo_enabled,
&mut |fid, meta, depth, value| {
Self::facet_fn_with_options(
&context.doc_alloc,
cached_sorter.deref_mut(),
BalancedCaches::insert_add_u32,
&mut del_add_facet_value,
DelAddFacetValue::insert_add,
docid,
fid,
meta,
filterable_attributes,
depth,
value,
)
},
)
}
DocumentChange::Insertion(inner) => extract_document_facets(
inner.inserted(),
inner.external_document_id(),
new_fields_ids_map.deref_mut(),
filterable_attributes,
sortable_fields,
asc_desc_fields,
distinct_field,
is_geo_enabled,
&mut |fid, meta, depth, value| {
Self::facet_fn_with_options(
&context.doc_alloc,
cached_sorter.deref_mut(),
BalancedCaches::insert_add_u32,
&mut del_add_facet_value,
DelAddFacetValue::insert_add,
docid,
fid,
meta,
filterable_attributes,
depth,
value,
)
},
),
};
del_add_facet_value.send_data(docid, sender, &context.doc_alloc).unwrap();
Ok(())
res
}
#[allow(clippy::too_many_arguments)]
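
The `facet_fn!` macro above (see the comment on postponing closure creation) works around borrow-checker error E0499: writing both the `del` and `add` closures up front would borrow `cached_sorter` and `del_add_facet_value` mutably twice at once. A stripped-down sketch of the same trick:

```rust
fn extract(values: &[i32], is_deletion: bool, sink: &mut Vec<String>) {
    // Creating both closures eagerly would mutably borrow `sink` twice and
    // be rejected by the borrow checker; the macro expands a closure only
    // at its single point of use.
    macro_rules! record {
        (del) => {
            |v: &i32| sink.push(format!("del {v}"))
        };
        (add) => {
            |v: &i32| sink.push(format!("add {v}"))
        };
    }
    if is_deletion {
        values.iter().for_each(record!(del));
    } else {
        values.iter().for_each(record!(add));
    }
}
```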

View File

@ -15,11 +15,13 @@ use crate::{
#[allow(clippy::too_many_arguments)]
pub fn extract_document_facets<'doc>(
document: impl Document<'doc>,
external_document_id: &str,
field_id_map: &mut GlobalFieldsIdsMap,
filterable_attributes: &[FilterableAttributesRule],
sortable_fields: &HashSet<String>,
asc_desc_fields: &HashSet<String>,
distinct_field: &Option<String>,
is_geo_enabled: bool,
facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>,
) -> Result<()> {
// return the match result for the given field name.
@ -99,24 +101,17 @@ pub fn extract_document_facets<'doc>(
}
}
Ok(())
}
if is_geo_enabled {
if let Some(geo_value) = document.geo_field()? {
if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map
.id_with_metadata_or_insert("_geo.lat")
.zip(field_id_map.id_with_metadata_or_insert("_geo.lng"))
.ok_or(UserError::AttributeLimitReached)?;
pub fn extract_geo_document<'doc>(
document: impl Document<'doc>,
external_document_id: &str,
field_id_map: &mut GlobalFieldsIdsMap,
facet_fn: &mut impl FnMut(FieldId, Metadata, perm_json_p::Depth, &Value) -> Result<()>,
) -> Result<()> {
if let Some(geo_value) = document.geo_field()? {
if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
let ((lat_fid, lat_meta), (lng_fid, lng_meta)) = field_id_map
.id_with_metadata_or_insert("_geo.lat")
.zip(field_id_map.id_with_metadata_or_insert("_geo.lng"))
.ok_or(UserError::AttributeLimitReached)?;
facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?;
facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?;
facet_fn(lat_fid, lat_meta, perm_json_p::Depth::OnBaseKey, &lat.into())?;
facet_fn(lng_fid, lng_meta, perm_json_p::Depth::OnBaseKey, &lng.into())?;
}
}
}
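
The `_geo` handling above funnels `lat`/`lng` through the same facet callback as ordinary fields once `extract_geo_coordinates` has validated them. A hedged sketch of the coordinate extraction itself; milli's real function also accepts string values and reports errors against the external document id:

```rust
use serde_json::Value;

fn geo_coordinates(geo_value: &Value) -> Option<[f64; 2]> {
    // Expect an object like {"lat": 48.85, "lng": 2.35}.
    let lat = geo_value.get("lat")?.as_f64()?;
    let lng = geo_value.get("lng")?.as_f64()?;
    Some([lat, lng])
}
```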

View File

@ -336,7 +336,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.primary_key = Setting::Set(primary_key);
}
pub fn set_authorize_typos(&mut self, val: bool) {
pub fn set_autorize_typos(&mut self, val: bool) {
self.authorize_typos = Setting::Set(val);
}

View File

@ -792,7 +792,7 @@ fn test_disable_typo() {
index
.update_settings_using_wtxn(&mut txn, |settings| {
settings.set_authorize_typos(false);
settings.set_autorize_typos(false);
})
.unwrap();