Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-11-22 20:56:04 +00:00
Compare commits
1 commit
prototype-...dependabot
| Author | SHA1 | Date |
|---|---|---|
| | 1b01539fc9 | |
2 .github/workflows/db-change-comments.yml vendored
@@ -44,7 +44,7 @@ jobs:
if: github.event.label.name == 'db change'
steps:
- name: Add comment
uses: actions/github-script@v7
uses: actions/github-script@v8
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

@@ -158,9 +158,6 @@ pub enum KindDump {
UpgradeDatabase {
from: (u32, u32, u32),
},
IndexCompaction {
index_uid: String,
},
}

impl From<Task> for TaskDump {
@@ -243,9 +240,6 @@ impl From<KindWithContent> for KindDump {
KindWithContent::UpgradeDatabase { from: version } => {
KindDump::UpgradeDatabase { from: version }
}
KindWithContent::IndexCompaction { index_uid } => {
KindDump::IndexCompaction { index_uid }
}
}
}
}

@@ -234,9 +234,6 @@ impl<'a> Dump<'a> {
}
}
KindDump::UpgradeDatabase { from } => KindWithContent::UpgradeDatabase { from },
KindDump::IndexCompaction { index_uid } => {
KindWithContent::IndexCompaction { index_uid }
}
},
};

@@ -220,9 +220,11 @@ impl IndexMap {
uuid: &Uuid,
enable_mdb_writemap: bool,
map_size_growth: usize,
) -> Option<EnvClosingEvent> {
let index = self.available.remove(uuid)?;
Some(self.close(*uuid, index, enable_mdb_writemap, map_size_growth))
) {
let Some(index) = self.available.remove(uuid) else {
return;
};
self.close(*uuid, index, enable_mdb_writemap, map_size_growth);
}

fn close(
@@ -231,21 +233,14 @@ impl IndexMap {
index: Index,
enable_mdb_writemap: bool,
map_size_growth: usize,
) -> EnvClosingEvent {
) {
let map_size = index.map_size() + map_size_growth;
let closing_event = index.prepare_for_closing();
let generation = self.next_generation();
self.unavailable.insert(
uuid,
Some(ClosingIndex {
uuid,
closing_event: closing_event.clone(),
enable_mdb_writemap,
map_size,
generation,
}),
Some(ClosingIndex { uuid, closing_event, enable_mdb_writemap, map_size, generation }),
);
closing_event
}

/// Attempts to delete an index.

@@ -4,7 +4,7 @@ use std::time::Duration;
use std::{fs, thread};

use meilisearch_types::heed::types::{SerdeJson, Str};
use meilisearch_types::heed::{Database, Env, EnvClosingEvent, RoTxn, RwTxn, WithoutTls};
use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn, WithoutTls};
use meilisearch_types::milli;
use meilisearch_types::milli::database_stats::DatabaseStats;
use meilisearch_types::milli::index::RollbackOutcome;
@@ -341,24 +341,6 @@ impl IndexMapper {
Ok(())
}

/// Closes the specified index.
///
/// This operation involves closing the underlying environment and so can take a long time to complete.
///
/// # Panics
///
/// - If the Index corresponding to the passed name is concurrently being deleted/resized or cannot be found in the
/// in memory hash map.
pub fn close_index(&self, rtxn: &RoTxn, name: &str) -> Result<Option<EnvClosingEvent>> {
let uuid = self
.index_mapping
.get(rtxn, name)?
.ok_or_else(|| Error::IndexNotFound(name.to_string()))?;

// We remove the index from the in-memory index map.
Ok(self.index_map.write().unwrap().close_for_resize(&uuid, self.enable_mdb_writemap, 0))
}

/// Return an index, may open it if it wasn't already opened.
pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
if let Some((current_name, current_index)) =

@@ -317,9 +317,6 @@ fn snapshot_details(d: &Details) -> String {
Details::UpgradeDatabase { from, to } => {
format!("{{ from: {from:?}, to: {to:?} }}")
}
Details::IndexCompaction { index_uid, pre_compaction_size, post_compaction_size } => {
format!("{{ index_uid: {index_uid:?}, pre_compaction_size: {pre_compaction_size:?}, post_compaction_size: {post_compaction_size:?} }}")
}
}
}

@@ -138,16 +138,6 @@ make_enum_progress! {
}
}

make_enum_progress! {
pub enum IndexCompaction {
RetrieveTheIndex,
CreateTemporaryFile,
CopyAndCompactTheIndex,
PersistTheCompactedIndex,
CloseTheIndex,
}
}

make_enum_progress! {
pub enum InnerSwappingTwoIndexes {
RetrieveTheTasks,

@@ -25,7 +25,6 @@ enum AutobatchKind {
IndexDeletion,
IndexUpdate,
IndexSwap,
IndexCompaction,
}

impl AutobatchKind {
@@ -69,7 +68,6 @@ impl From<KindWithContent> for AutobatchKind {
KindWithContent::IndexCreation { .. } => AutobatchKind::IndexCreation,
KindWithContent::IndexUpdate { .. } => AutobatchKind::IndexUpdate,
KindWithContent::IndexSwap { .. } => AutobatchKind::IndexSwap,
KindWithContent::IndexCompaction { .. } => AutobatchKind::IndexCompaction,
KindWithContent::TaskCancelation { .. }
| KindWithContent::TaskDeletion { .. }
| KindWithContent::DumpCreation { .. }
@@ -120,9 +118,6 @@ pub enum BatchKind {
IndexSwap {
id: TaskId,
},
IndexCompaction {
id: TaskId,
},
}

impl BatchKind {
@@ -188,13 +183,6 @@ impl BatchKind {
)),
false,
),
K::IndexCompaction => (
Break((
BatchKind::IndexCompaction { id: task_id },
BatchStopReason::TaskCannotBeBatched { kind, id: task_id },
)),
false,
),
K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
K::DocumentImport { allow_index_creation, primary_key: pk }
if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
@@ -299,10 +287,8 @@ impl BatchKind {
};

match (self, autobatch_kind) {
// We don't batch any of these operations
(this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition | K::IndexCompaction) => {
Break((this, BatchStopReason::TaskCannotBeBatched { kind, id }))
},
// We don't batch any of these operations
(this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition) => Break((this, BatchStopReason::TaskCannotBeBatched { kind, id })),
// We must not batch tasks that don't have the same index creation rights if the index doesn't already exists.
(this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
Break((this, BatchStopReason::IndexCreationMismatch { id }))
@@ -497,7 +483,6 @@ impl BatchKind {
| BatchKind::IndexDeletion { .. }
| BatchKind::IndexUpdate { .. }
| BatchKind::IndexSwap { .. }
| BatchKind::IndexCompaction { .. }
| BatchKind::DocumentEdition { .. },
_,
) => {

@@ -55,10 +55,6 @@ pub(crate) enum Batch {
UpgradeDatabase {
tasks: Vec<Task>,
},
IndexCompaction {
index_uid: String,
task: Task,
},
}

#[derive(Debug)]
@@ -114,8 +110,7 @@ impl Batch {
| Batch::Dump(task)
| Batch::IndexCreation { task, .. }
| Batch::Export { task }
| Batch::IndexUpdate { task, .. }
| Batch::IndexCompaction { task, .. } => {
| Batch::IndexUpdate { task, .. } => {
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
}
Batch::SnapshotCreation(tasks)
@@ -160,8 +155,7 @@ impl Batch {
IndexOperation { op, .. } => Some(op.index_uid()),
IndexCreation { index_uid, .. }
| IndexUpdate { index_uid, .. }
| IndexDeletion { index_uid, .. }
| IndexCompaction { index_uid, .. } => Some(index_uid),
| IndexDeletion { index_uid, .. } => Some(index_uid),
}
}
}
@@ -181,7 +175,6 @@ impl fmt::Display for Batch {
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
Batch::IndexCompaction { .. } => f.write_str("IndexCompaction")?,
Batch::Export { .. } => f.write_str("Export")?,
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
};
@@ -437,12 +430,6 @@ impl IndexScheduler {
current_batch.processing(Some(&mut task));
Ok(Some(Batch::IndexSwap { task }))
}
BatchKind::IndexCompaction { id } => {
let mut task =
self.queue.tasks.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
current_batch.processing(Some(&mut task));
Ok(Some(Batch::IndexCompaction { index_uid, task }))
}
}
}

@@ -1,26 +1,22 @@
use std::collections::{BTreeSet, HashMap, HashSet};
use std::io::{Seek, SeekFrom};
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::sync::atomic::Ordering;

use byte_unit::Byte;
use meilisearch_types::batches::{BatchEnqueuedAt, BatchId};
use meilisearch_types::heed::{RoTxn, RwTxn};
use meilisearch_types::milli::heed::CompactionOption;
use meilisearch_types::milli::progress::{Progress, VariableNameStep};
use meilisearch_types::milli::{self, ChannelCongestion};
use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task};
use meilisearch_types::versioning::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
use milli::update::Settings as MilliSettings;
use roaring::RoaringBitmap;
use tempfile::PersistError;
use time::OffsetDateTime;

use super::create_batch::Batch;
use crate::processing::{
AtomicBatchStep, AtomicTaskStep, CreateIndexProgress, DeleteIndexProgress, FinalizingIndexStep,
IndexCompaction, InnerSwappingTwoIndexes, SwappingTheIndexes, TaskCancelationProgress,
TaskDeletionProgress, UpdateIndexProgress,
InnerSwappingTwoIndexes, SwappingTheIndexes, TaskCancelationProgress, TaskDeletionProgress,
UpdateIndexProgress,
};
use crate::utils::{
self, remove_n_tasks_datetime_earlier_than, remove_task_datetime, swap_index_uid_in_task,
@@ -422,46 +418,6 @@ impl IndexScheduler {
task.status = Status::Succeeded;
Ok((vec![task], ProcessBatchInfo::default()))
}
Batch::IndexCompaction { index_uid: _, mut task } => {
let KindWithContent::IndexCompaction { index_uid } = &task.kind else {
unreachable!()
};

let rtxn = self.env.read_txn()?;
let ret = catch_unwind(AssertUnwindSafe(|| {
self.apply_compaction(&rtxn, &progress, index_uid)
}));

let (pre_size, post_size) = match ret {
Ok(Ok(stats)) => stats,
Ok(Err(Error::AbortedTask)) => return Err(Error::AbortedTask),
Ok(Err(e)) => return Err(e),
Err(e) => {
let msg = match e.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match e.downcast_ref::<String>() {
Some(s) => &s[..],
None => "Box<dyn Any>",
},
};
return Err(Error::Export(Box::new(Error::ProcessBatchPanicked(
msg.to_string(),
))));
}
};

task.status = Status::Succeeded;
if let Some(Details::IndexCompaction {
index_uid: _,
pre_compaction_size,
post_compaction_size,
}) = task.details.as_mut()
{
*pre_compaction_size = Some(Byte::from_u64(pre_size));
*post_compaction_size = Some(Byte::from_u64(post_size));
}
Ok((vec![task], ProcessBatchInfo::default()))
}
Batch::Export { mut task } => {
let KindWithContent::Export { url, api_key, payload_size, indexes } = &task.kind
else {
@@ -537,58 +493,6 @@ impl IndexScheduler {
}
}

fn apply_compaction(
&self,
rtxn: &RoTxn,
progress: &Progress,
index_uid: &str,
) -> Result<(u64, u64)> {
// 1. Verify that the index exists
if !self.index_mapper.index_exists(rtxn, index_uid)? {
return Err(Error::IndexNotFound(index_uid.to_owned()));
}

// 2. We retrieve the index and create a temporary file in the index directory
progress.update_progress(IndexCompaction::RetrieveTheIndex);
let index = self.index_mapper.index(rtxn, index_uid)?;
progress.update_progress(IndexCompaction::CreateTemporaryFile);
let pre_size = std::fs::metadata(index.path().join("data.mdb"))?.len();
let mut file = tempfile::Builder::new()
.suffix("data.")
.prefix(".mdb.cpy")
.tempfile_in(index.path())?;

// 3. We copy the index data to the temporary file
progress.update_progress(IndexCompaction::CopyAndCompactTheIndex);
index
.copy_to_file(file.as_file_mut(), CompactionOption::Enabled)
.map_err(|error| Error::Milli { error, index_uid: Some(index_uid.to_string()) })?;
// ...and reset the file position as specified in the documentation
file.seek(SeekFrom::Start(0))?;

// 4. We replace the index data file with the temporary file
progress.update_progress(IndexCompaction::PersistTheCompactedIndex);
let post_size = file.as_file().metadata()?.len();
match file.persist(index.path().join("data.mdb")) {
Ok(file) => file.sync_all()?,
// TODO see if we have a _resource busy_ error and probably handle this by:
// 1. closing the index, 2. replacing and 3. reopening it
Err(PersistError { error, file: _ }) => return Err(Error::IoError(error)),
};

// 5. prepare to close the index and wait for it
// The next time the index is opened, it will use the new compacted data
let closing_event = self.index_mapper.close_index(rtxn, index_uid)?;

if let Some(closing_event) = closing_event {
progress.update_progress(IndexCompaction::CloseTheIndex);
drop(index);
closing_event.wait();
}

Ok((pre_size, post_size))
}

/// Swap the index `lhs` with the index `rhs`.
fn apply_index_swap(
&self,

@@ -722,7 +722,7 @@ fn basic_get_stats() {
let kind = index_creation_task("whalo", "fish");
let _task = index_scheduler.register(kind, None, false).unwrap();

snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -742,7 +742,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -754,10 +753,10 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);

handle.advance_till([Start, BatchCreated]);
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -777,7 +776,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -789,7 +787,7 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);

handle.advance_till([
InsideProcessBatch,
@@ -799,7 +797,7 @@ fn basic_get_stats() {
Start,
BatchCreated,
]);
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -819,7 +817,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -831,7 +828,7 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);

// now we make one more batch, the started_at field of the new tasks will be past `second_start_time`
handle.advance_till([
@@ -842,7 +839,7 @@ fn basic_get_stats() {
Start,
BatchCreated,
]);
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r###"
snapshot!(json_string!(index_scheduler.get_stats().unwrap()), @r#"
{
"indexes": {
"catto": 1,
@@ -862,7 +859,6 @@ fn basic_get_stats() {
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCompaction": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@@ -874,7 +870,7 @@ fn basic_get_stats() {
"upgradeDatabase": 0
}
}
"###);
"#);
}

#[test]

@@ -256,15 +256,14 @@ pub fn swap_index_uid_in_task(task: &mut Task, swap: (&str, &str)) {
use KindWithContent as K;
let mut index_uids = vec![];
match &mut task.kind {
K::DocumentAdditionOrUpdate { index_uid, .. }
| K::DocumentEdition { index_uid, .. }
| K::DocumentDeletion { index_uid, .. }
| K::DocumentDeletionByFilter { index_uid, .. }
| K::DocumentClear { index_uid }
| K::SettingsUpdate { index_uid, .. }
| K::IndexDeletion { index_uid }
| K::IndexCreation { index_uid, .. }
| K::IndexCompaction { index_uid, .. } => index_uids.push(index_uid),
K::DocumentAdditionOrUpdate { index_uid, .. } => index_uids.push(index_uid),
K::DocumentEdition { index_uid, .. } => index_uids.push(index_uid),
K::DocumentDeletion { index_uid, .. } => index_uids.push(index_uid),
K::DocumentDeletionByFilter { index_uid, .. } => index_uids.push(index_uid),
K::DocumentClear { index_uid } => index_uids.push(index_uid),
K::SettingsUpdate { index_uid, .. } => index_uids.push(index_uid),
K::IndexDeletion { index_uid } => index_uids.push(index_uid),
K::IndexCreation { index_uid, .. } => index_uids.push(index_uid),
K::IndexUpdate { index_uid, new_index_uid, .. } => {
index_uids.push(index_uid);
if let Some(new_uid) = new_index_uid {
@@ -619,13 +618,6 @@ impl crate::IndexScheduler {
Details::UpgradeDatabase { from: _, to: _ } => {
assert_eq!(kind.as_kind(), Kind::UpgradeDatabase);
}
Details::IndexCompaction {
index_uid: _,
pre_compaction_size: _,
post_compaction_size: _,
} => {
assert_eq!(kind.as_kind(), Kind::IndexCompaction);
}
}
}

@@ -109,7 +109,6 @@ impl HeedAuthStore {
Action::IndexesGet,
Action::IndexesUpdate,
Action::IndexesSwap,
Action::IndexesCompact,
]
.iter(),
);

@@ -380,9 +380,6 @@ pub enum Action {
#[serde(rename = "webhooks.*")]
#[deserr(rename = "webhooks.*")]
WebhooksAll,
#[serde(rename = "indexes.compact")]
#[deserr(rename = "indexes.compact")]
IndexesCompact,
}

impl Action {
@@ -401,7 +398,6 @@ impl Action {
INDEXES_UPDATE => Some(Self::IndexesUpdate),
INDEXES_DELETE => Some(Self::IndexesDelete),
INDEXES_SWAP => Some(Self::IndexesSwap),
INDEXES_COMPACT => Some(Self::IndexesCompact),
TASKS_ALL => Some(Self::TasksAll),
TASKS_CANCEL => Some(Self::TasksCancel),
TASKS_DELETE => Some(Self::TasksDelete),
@@ -466,7 +462,6 @@ impl Action {
IndexesUpdate => false,
IndexesDelete => false,
IndexesSwap => false,
IndexesCompact => false,
TasksCancel => false,
TasksDelete => false,
TasksGet => true,
@@ -518,7 +513,6 @@ pub mod actions {
pub const INDEXES_UPDATE: u8 = IndexesUpdate.repr();
pub const INDEXES_DELETE: u8 = IndexesDelete.repr();
pub const INDEXES_SWAP: u8 = IndexesSwap.repr();
pub const INDEXES_COMPACT: u8 = IndexesCompact.repr();
pub const TASKS_ALL: u8 = TasksAll.repr();
pub const TASKS_CANCEL: u8 = TasksCancel.repr();
pub const TASKS_DELETE: u8 = TasksDelete.repr();
@@ -620,7 +614,6 @@ pub(crate) mod test {
assert!(WebhooksDelete.repr() == 47 && WEBHOOKS_DELETE == 47);
assert!(WebhooksCreate.repr() == 48 && WEBHOOKS_CREATE == 48);
assert!(WebhooksAll.repr() == 49 && WEBHOOKS_ALL == 49);
assert!(IndexesCompact.repr() == 50 && INDEXES_COMPACT == 50);
}
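
Editor's note, not part of the diff: a minimal sketch of how a client could grant the new `indexes.compact` action when creating an API key. The action name comes from the changes above; the host, master key, and index name are illustrative assumptions, and the sketch relies on the `reqwest` and `tokio` crates.

```rust
// Illustrative only: create a key restricted to compacting one index.
// Assumes a local Meilisearch reachable at localhost:7700 and a MASTER_KEY.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let body = r#"{
        "description": "key that may only compact the movies index",
        "actions": ["indexes.compact"],
        "indexes": ["movies"],
        "expiresAt": null
    }"#;

    let response = reqwest::Client::new()
        .post("http://localhost:7700/keys")
        .bearer_auth("MASTER_KEY")
        .header("Content-Type", "application/json")
        .body(body)
        .send()
        .await?;

    // On success the response contains the generated key.
    println!("{}", response.text().await?);
    Ok(())
}
```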

#[test]

@@ -142,11 +142,6 @@ pub struct DetailsView {
pub old_index_uid: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub new_index_uid: Option<String>,
// index compaction
#[serde(skip_serializing_if = "Option::is_none")]
pub pre_compaction_size: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub post_compaction_size: Option<String>,
}

impl DetailsView {
@@ -319,24 +314,6 @@ impl DetailsView {
// We should never be able to batch multiple renames at the same time.
(Some(left), Some(_right)) => Some(left),
},
pre_compaction_size: match (
self.pre_compaction_size.clone(),
other.pre_compaction_size.clone(),
) {
(None, None) => None,
(None, Some(size)) | (Some(size), None) => Some(size),
// We should never be able to batch multiple renames at the same time.
(Some(left), Some(_right)) => Some(left),
},
post_compaction_size: match (
self.post_compaction_size.clone(),
other.post_compaction_size.clone(),
) {
(None, None) => None,
(None, Some(size)) | (Some(size), None) => Some(size),
// We should never be able to batch multiple renames at the same time.
(Some(left), Some(_right)) => Some(left),
},
}
}
}
@@ -438,15 +415,6 @@ impl From<Details> for DetailsView {
upgrade_to: Some(format!("v{}.{}.{}", to.0, to.1, to.2)),
..Default::default()
},
Details::IndexCompaction { pre_compaction_size, post_compaction_size, .. } => {
DetailsView {
pre_compaction_size: pre_compaction_size
.map(|size| size.get_appropriate_unit(UnitType::Both).to_string()),
post_compaction_size: post_compaction_size
.map(|size| size.get_appropriate_unit(UnitType::Both).to_string()),
..Default::default()
}
}
}
}
}

@@ -67,8 +67,7 @@ impl Task {
| SettingsUpdate { index_uid, .. }
| IndexCreation { index_uid, .. }
| IndexUpdate { index_uid, .. }
| IndexDeletion { index_uid }
| IndexCompaction { index_uid } => Some(index_uid),
| IndexDeletion { index_uid } => Some(index_uid),
}
}

@@ -95,8 +94,7 @@ impl Task {
| KindWithContent::DumpCreation { .. }
| KindWithContent::SnapshotCreation
| KindWithContent::Export { .. }
| KindWithContent::UpgradeDatabase { .. }
| KindWithContent::IndexCompaction { .. } => None,
| KindWithContent::UpgradeDatabase { .. } => None,
}
}
}
@@ -172,9 +170,6 @@ pub enum KindWithContent {
UpgradeDatabase {
from: (u32, u32, u32),
},
IndexCompaction {
index_uid: String,
},
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
@@ -211,7 +206,6 @@ impl KindWithContent {
KindWithContent::SnapshotCreation => Kind::SnapshotCreation,
KindWithContent::Export { .. } => Kind::Export,
KindWithContent::UpgradeDatabase { .. } => Kind::UpgradeDatabase,
KindWithContent::IndexCompaction { .. } => Kind::IndexCompaction,
}
}

@@ -232,8 +226,7 @@ impl KindWithContent {
| DocumentClear { index_uid }
| SettingsUpdate { index_uid, .. }
| IndexCreation { index_uid, .. }
| IndexDeletion { index_uid }
| IndexCompaction { index_uid } => vec![index_uid],
| IndexDeletion { index_uid } => vec![index_uid],
IndexUpdate { index_uid, new_index_uid, .. } => {
let mut indexes = vec![index_uid.as_str()];
if let Some(new_uid) = new_index_uid {
@@ -332,11 +325,6 @@ impl KindWithContent {
versioning::VERSION_PATCH,
),
}),
KindWithContent::IndexCompaction { index_uid } => Some(Details::IndexCompaction {
index_uid: index_uid.clone(),
pre_compaction_size: None,
post_compaction_size: None,
}),
}
}

@@ -419,11 +407,6 @@ impl KindWithContent {
versioning::VERSION_PATCH,
),
}),
KindWithContent::IndexCompaction { index_uid } => Some(Details::IndexCompaction {
index_uid: index_uid.clone(),
pre_compaction_size: None,
post_compaction_size: None,
}),
}
}
}
@@ -486,11 +469,6 @@ impl From<&KindWithContent> for Option<Details> {
versioning::VERSION_PATCH,
),
}),
KindWithContent::IndexCompaction { index_uid } => Some(Details::IndexCompaction {
index_uid: index_uid.clone(),
pre_compaction_size: None,
post_compaction_size: None,
}),
}
}
}
@@ -601,7 +579,6 @@ pub enum Kind {
SnapshotCreation,
Export,
UpgradeDatabase,
IndexCompaction,
}

impl Kind {
@@ -613,8 +590,7 @@ impl Kind {
| Kind::SettingsUpdate
| Kind::IndexCreation
| Kind::IndexDeletion
| Kind::IndexUpdate
| Kind::IndexCompaction => true,
| Kind::IndexUpdate => true,
Kind::IndexSwap
| Kind::TaskCancelation
| Kind::TaskDeletion
@@ -642,7 +618,6 @@ impl Display for Kind {
Kind::SnapshotCreation => write!(f, "snapshotCreation"),
Kind::Export => write!(f, "export"),
Kind::UpgradeDatabase => write!(f, "upgradeDatabase"),
Kind::IndexCompaction => write!(f, "indexCompaction"),
}
}
}
@@ -678,8 +653,6 @@ impl FromStr for Kind {
Ok(Kind::Export)
} else if kind.eq_ignore_ascii_case("upgradeDatabase") {
Ok(Kind::UpgradeDatabase)
} else if kind.eq_ignore_ascii_case("indexCompaction") {
Ok(Kind::IndexCompaction)
} else {
Err(ParseTaskKindError(kind.to_owned()))
}
@@ -765,11 +738,6 @@ pub enum Details {
from: (u32, u32, u32),
to: (u32, u32, u32),
},
IndexCompaction {
index_uid: String,
pre_compaction_size: Option<Byte>,
post_compaction_size: Option<Byte>,
},
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
@@ -832,10 +800,6 @@ impl Details {
Self::ClearAll { deleted_documents } => *deleted_documents = Some(0),
Self::TaskCancelation { canceled_tasks, .. } => *canceled_tasks = Some(0),
Self::TaskDeletion { deleted_tasks, .. } => *deleted_tasks = Some(0),
Self::IndexCompaction { pre_compaction_size, post_compaction_size, .. } => {
*pre_compaction_size = None;
*post_compaction_size = None;
}
Self::SettingsUpdate { .. }
| Self::IndexInfo { .. }
| Self::Dump { .. }

@@ -91,7 +91,7 @@ time = { version = "0.3.41", features = [
] }
tokio = { version = "1.45.1", features = ["full"] }
toml = "0.8.23"
uuid = { version = "1.18.0", features = ["serde", "v4", "v7"] }
uuid = { version = "1.17.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }
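
Editor's note, not part of the diff: the dependency bump above adds the `v7` feature so the search routes can stamp each request with a time-ordered `requestUid` (see the `Uuid::now_v7()` calls later in this compare). A minimal sketch of that generation, assuming only the `uuid` crate as declared above:

```rust
// Illustrative sketch of the request-id generation used by the search routes.
// Requires uuid = { version = "1.18.0", features = ["v7"] } as declared above.
use uuid::Uuid;

fn main() {
    let request_uid = Uuid::now_v7();
    // v7 UUIDs embed a millisecond timestamp, so ids sort roughly by creation time in logs.
    println!("request_uid = {request_uid}");
}
```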

@@ -1,100 +0,0 @@
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use meilisearch_types::tasks::KindWithContent;
use serde::Serialize;
use tracing::debug;
use utoipa::OpenApi;

use super::ActionPolicy;
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::SummarizedTaskView;

#[derive(OpenApi)]
#[openapi(
paths(compact),
tags(
(
name = "Compact an index",
description = "The /compact route compacts the database to reorganize it and make it smaller and more efficient.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/compact"),
),
),
)]
pub struct CompactApi;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(compact))));
}

/// Compact an index
#[utoipa::path(
post,
path = "{indexUid}/compact",
tag = "Compact an index",
security(("Bearer" = ["search", "*"])),
params(("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = ACCEPTED, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn compact(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_COMPACT }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
req: HttpRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

analytics.publish(IndexesCompactAggregator, &req);

let task = KindWithContent::IndexCompaction { index_uid: index_uid.to_string() };
let task =
match tokio::task::spawn_blocking(move || index_scheduler.register(task, None, false))
.await?
{
Ok(task) => task,
Err(e) => return Err(e.into()),
};

debug!(returns = ?task, "Compact the {index_uid} index");
Ok(HttpResponse::Accepted().json(SummarizedTaskView::from(task)))
}

#[derive(Serialize)]
pub struct IndexesCompactAggregator;

impl Aggregate for IndexesCompactAggregator {
fn event_name(&self) -> &'static str {
"Indexes Compacted"
}

fn aggregate(self: Box<Self>, _new: Box<Self>) -> Box<Self> {
Box::new(Self)
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}
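
Editor's note, not part of the diff: a minimal sketch of how the route above could be called. The verb, path, and the summarized-task response come from the handler and OpenAPI annotations above; the host, index name, and key are illustrative assumptions, and the sketch relies on the `reqwest` and `tokio` crates.

```rust
// Illustrative only: enqueue a compaction task for the `movies` index.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let response = reqwest::Client::new()
        .post("http://localhost:7700/indexes/movies/compact")
        .bearer_auth("MASTER_KEY") // any key carrying the `indexes.compact` action works
        .send()
        .await?;

    // The handler answers 202 Accepted with a SummarizedTaskView
    // (taskUid, status "enqueued", type, enqueuedAt).
    println!("{} {}", response.status(), response.text().await?);
    Ok(())
}
```

The enqueued task can then be polled on the tasks API until its details report the pre- and post-compaction sizes shown in the DetailsView changes above.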

@@ -28,7 +28,6 @@ use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::is_dry_run;
use crate::Opt;

pub mod compact;
pub mod documents;
mod enterprise_edition;
pub mod facet_search;
@@ -50,9 +49,8 @@ pub use enterprise_edition::proxy::{PROXY_ORIGIN_REMOTE_HEADER, PROXY_ORIGIN_TAS
(path = "/", api = facet_search::FacetSearchApi),
(path = "/", api = similar::SimilarApi),
(path = "/", api = settings::SettingsApi),
(path = "/", api = compact::CompactApi),
),
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats, compact::compact),
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats),
tags(
(
name = "Indexes",
@@ -82,8 +80,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/facet-search").configure(facet_search::configure))
.service(web::scope("/similar").configure(similar::configure))
.service(web::scope("/settings").configure(settings::configure))
.service(web::scope("/compact").configure(compact::configure)),
.service(web::scope("/settings").configure(settings::configure)),
);
}

@@ -13,7 +13,6 @@ use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;
use utoipa::{IntoParams, OpenApi};
use uuid::Uuid;

use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
@@ -326,8 +325,7 @@ pub async fn search_with_url_query(
req: HttpRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let request_uid = Uuid::now_v7();
debug!(request_uid = ?request_uid, parameters = ?params, "Search get");
debug!(parameters = ?params, "Search get");
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

let mut query: SearchQuery = params.into_inner().try_into()?;
@@ -353,7 +351,6 @@ pub async fn search_with_url_query(
search_kind,
retrieve_vector,
index_scheduler.features(),
request_uid,
)
})
.await;
@@ -366,7 +363,7 @@ pub async fn search_with_url_query(

let search_result = search_result?;

debug!(request_uid = ?request_uid, returns = ?search_result, "Search get");
debug!(returns = ?search_result, "Search get");
Ok(HttpResponse::Ok().json(search_result))
}

@@ -435,10 +432,9 @@ pub async fn search_with_post(
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let request_uid = Uuid::now_v7();

let mut query = params.into_inner();
debug!(request_uid = ?request_uid, parameters = ?query, "Search post");
debug!(parameters = ?query, "Search post");

// Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -462,7 +458,6 @@ pub async fn search_with_post(
search_kind,
retrieve_vectors,
index_scheduler.features(),
request_uid,
)
})
.await;
@@ -478,7 +473,7 @@ pub async fn search_with_post(

let search_result = search_result?;

debug!(request_uid = ?request_uid, returns = ?search_result, "Search post");
debug!(returns = ?search_result, "Search post");
Ok(HttpResponse::Ok().json(search_result))
}

@@ -234,7 +234,6 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
facet_stats: _,
degraded,
used_negative_operator,
request_uid: _,
} = result;

self.total_succeeded = self.total_succeeded.saturating_add(1);

@@ -9,7 +9,6 @@ use meilisearch_types::keys::actions;
use serde::Serialize;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use uuid::Uuid;

use super::multi_search_analytics::MultiSearchAggregator;
use crate::analytics::Analytics;
@@ -152,7 +151,6 @@ pub async fn multi_search_with_post(
// Since we don't want to process half of the search requests and then get a permit refused
// we're going to get one permit for the whole duration of the multi-search request.
let permit = search_queue.try_get_search_permit().await?;
let request_uid = Uuid::now_v7();

let federated_search = params.into_inner();

@@ -190,27 +188,14 @@ pub async fn multi_search_with_post(

let response = match federation {
Some(federation) => {
debug!(
request_uid = ?request_uid,
federation = ?federation,
parameters = ?queries,
"Federated-search"
);

// check remote header
let is_proxy = req
.headers()
.get(PROXY_SEARCH_HEADER)
.is_some_and(|value| value.as_bytes() == PROXY_SEARCH_HEADER_VALUE.as_bytes());
let search_result = perform_federated_search(
&index_scheduler,
queries,
federation,
features,
is_proxy,
request_uid,
)
.await;
let search_result =
perform_federated_search(&index_scheduler, queries, federation, features, is_proxy)
.await;
permit.drop().await;

if search_result.is_ok() {
@@ -218,13 +203,6 @@ pub async fn multi_search_with_post(
}

analytics.publish(multi_aggregate, &req);

debug!(
request_uid = ?request_uid,
returns = ?search_result,
"Federated-search"
);

HttpResponse::Ok().json(search_result?)
}
None => {
@@ -238,12 +216,7 @@ pub async fn multi_search_with_post(
.map(SearchQueryWithIndex::into_index_query_federation)
.enumerate()
{
debug!(
request_uid = ?request_uid,
on_index = query_index,
parameters = ?query,
"Multi-search"
);
debug!(on_index = query_index, parameters = ?query, "Multi-search");

if federation_options.is_some() {
return Err((
@@ -285,7 +258,6 @@ pub async fn multi_search_with_post(
search_kind,
retrieve_vector,
features,
request_uid,
)
})
.await
@@ -314,11 +286,7 @@ pub async fn multi_search_with_post(
err
})?;

debug!(
request_uid = ?request_uid,
returns = ?search_results,
"Multi-search"
);
debug!(returns = ?search_results, "Multi-search");

HttpResponse::Ok().json(SearchResults { results: search_results })
}

@@ -226,14 +226,14 @@ mod tests {
{
let params = "types=createIndex";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"###);
"#);
}
}
#[test]

@@ -17,7 +17,6 @@ use meilisearch_types::milli::vector::Embedding;
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget, DEFAULT_VALUES_PER_FACET};
use roaring::RoaringBitmap;
use tokio::task::JoinHandle;
use uuid::Uuid;

use super::super::ranking_rules::{self, RankingRules};
use super::super::{
@@ -40,7 +39,6 @@ pub async fn perform_federated_search(
federation: Federation,
features: RoFeatures,
is_proxy: bool,
request_uid: Uuid,
) -> Result<FederatedSearchResult, ResponseError> {
if is_proxy {
features.check_network("Performing a remote federated search")?;
@@ -172,7 +170,6 @@ pub async fn perform_federated_search(
facet_stats,
facets_by_index,
remote_errors: partitioned_queries.has_remote.then_some(remote_errors),
request_uid: Some(request_uid),
})
}

@@ -442,7 +439,6 @@ fn merge_metadata(
degraded: degraded_for_host,
used_negative_operator: host_used_negative_operator,
remote_errors: _,
request_uid: _,
} in remote_results
{
let this_remote_duration = Duration::from_millis(*processing_time_ms as u64);

@@ -16,7 +16,6 @@ use meilisearch_types::milli::order_by_map::OrderByMap;
use meilisearch_types::milli::OrderBy;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
use uuid::Uuid;

use super::super::{ComputedFacets, FacetStats, HitsInfo, SearchHit, SearchQueryWithIndex};
use crate::milli::vector::Embedding;
@@ -132,8 +131,6 @@ pub struct FederatedSearchResult {
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
#[serde(default, skip_serializing_if = "FederatedFacets::is_empty")]
pub facets_by_index: FederatedFacets,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub request_uid: Option<Uuid>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub remote_errors: Option<BTreeMap<String, ResponseError>>,
@@ -159,7 +156,6 @@ impl fmt::Debug for FederatedSearchResult {
facet_stats,
facets_by_index,
remote_errors,
request_uid,
} = self;

let mut debug = f.debug_struct("SearchResult");
@@ -192,9 +188,6 @@ impl fmt::Debug for FederatedSearchResult {
if let Some(remote_errors) = remote_errors {
debug.field("remote_errors", &remote_errors);
}
if let Some(request_uid) = request_uid {
debug.field("request_uid", &request_uid);
}

debug.finish()
}

@@ -36,7 +36,6 @@ use serde_json::{json, Value};
#[cfg(test)]
mod mod_test;
use utoipa::ToSchema;
use uuid::Uuid;

use crate::error::MeilisearchHttpError;

@@ -852,8 +851,6 @@ pub struct SearchResult {
pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub request_uid: Option<Uuid>,

#[serde(skip_serializing_if = "Option::is_none")]
pub semantic_hit_count: Option<u32>,
@@ -875,7 +872,6 @@ impl fmt::Debug for SearchResult {
hits_info,
facet_distribution,
facet_stats,
request_uid,
semantic_hit_count,
degraded,
used_negative_operator,
@@ -905,9 +901,6 @@ impl fmt::Debug for SearchResult {
if let Some(semantic_hit_count) = semantic_hit_count {
debug.field("semantic_hit_count", &semantic_hit_count);
}
if let Some(request_uid) = request_uid {
debug.field("request_uid", &request_uid);
}

debug.finish()
}
@@ -1127,7 +1120,6 @@ pub fn perform_search(
search_kind: SearchKind,
retrieve_vectors: RetrieveVectors,
features: RoFeatures,
request_uid: Uuid,
) -> Result<SearchResult, ResponseError> {
let before_search = Instant::now();
let rtxn = index.read_txn()?;
@@ -1245,7 +1237,6 @@ pub fn perform_search(
degraded,
used_negative_operator,
semantic_hit_count,
request_uid: Some(request_uid),
};
Ok(result)
}

@@ -419,14 +419,14 @@ async fn error_add_api_key_invalid_parameters_actions() {
let (response, code) = server.add_api_key(content).await;

meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r#"
{
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`, `indexes.compact`",
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
}
"###);
"#);
}

#[actix_rt::test]

@@ -91,14 +91,14 @@ async fn create_api_key_bad_actions() {
// can't parse
let (response, code) = server.add_api_key(json!({ "actions": ["doggo"] })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`, `indexes.compact`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`, `*.get`, `webhooks.get`, `webhooks.update`, `webhooks.delete`, `webhooks.create`, `webhooks.*`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
}
"###);
"#);
}

#[actix_rt::test]

@@ -40,14 +40,14 @@ async fn batch_bad_types() {

let (response, code) = server.batches_filter("types=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"###);
"#);
}

#[actix_rt::test]

@@ -1853,7 +1853,7 @@ async fn add_documents_with_geo_field() {
.await;
snapshot!(code, @"200 OK");
// we are expecting docs 4 and 3 first as they have geo
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
@r###"
{
"hits": [
@@ -1885,8 +1885,7 @@ async fn add_documents_with_geo_field() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
}
@@ -1941,7 +1940,7 @@ async fn update_documents_with_geo_field() {
let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await;
snapshot!(code, @"200 OK");
// we are expecting docs 4 and 3 first as they have geo
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
@r###"
{
"hits": [
@@ -1973,8 +1972,7 @@ async fn update_documents_with_geo_field() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);

@@ -2046,7 +2044,7 @@ async fn update_documents_with_geo_field() {
let (response, code) = index.search_post(json!({"sort": ["_geoPoint(10,0):asc"]})).await;
snapshot!(code, @"200 OK");
// the search response should not have changed: we are expecting docs 4 and 3 first as they have geo
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }),
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }),
@r###"
{
"hits": [
@@ -2079,8 +2077,7 @@ async fn update_documents_with_geo_field() {
"processingTimeMs": "[time]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]"
"estimatedTotalHits": 4
}
"###);
}

@@ -14,18 +14,17 @@ async fn basic_add_settings_and_geojson_documents() {
server.wait_task(task.uid()).await.succeeded();

let (response, _) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }),
@r###"
snapshot!(response,
@r#"
{
"hits": [],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]"
"estimatedTotalHits": 0
}
"###);
"#);

let lille: serde_json::Value = serde_json::from_str(LILLE).unwrap();
let documents = json!([
@@ -93,8 +92,8 @@ async fn basic_add_settings_and_geojson_documents() {
"#);

let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }),
@r###"
snapshot!(response,
@r#"
{
"hits": [
{
@@ -112,10 +111,9 @@ async fn basic_add_settings_and_geojson_documents() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
"#);
}

#[actix_rt::test]
@@ -176,8 +174,8 @@ async fn basic_add_geojson_documents_and_settings() {
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }),
@r###"
snapshot!(response,
@r#"
{
"hits": [
{
@@ -195,10 +193,9 @@ async fn basic_add_geojson_documents_and_settings() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
"#);
}

#[actix_rt::test]
@@ -295,7 +292,7 @@ async fn geo_bounding_box() {
let (response, code) =
index.search_get("?filter=_geoBoundingBox([50.53987503447863,21.43443989912143],[43.76393151539099,0.54979129195425])&attributesToRetrieve=name").await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -354,17 +351,16 @@ async fn geo_bounding_box() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 17,
"requestUid": "[uuid]"
"estimatedTotalHits": 17
}
"###);
"#);

// Between Russia and Alaska
let (response, code) = index
.search_get("?filter=_geoBoundingBox([70,-148],[63,152])&attributesToRetrieve=name")
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r#"
{
"hits": [
{
@@ -381,10 +377,46 @@ async fn geo_bounding_box() {
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]"
"estimatedTotalHits": 3
}
"###);
"#);
}

#[actix_rt::test]
async fn geo_radius() {
let index = shared_index_geojson_documents().await;

// 200km around Luxembourg
let (response, code) = index
.search_get("?filter=_geoRadius(49.4369862,6.5576591,200000)&attributesToRetrieve=name")
.await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
{
"hits": [
{
"name": "Belgium"
},
{
"name": "Germany"
},
{
"name": "France"
},
{
"name": "Luxembourg"
},
{
"name": "Netherlands"
}
],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5
}
"#);
}

#[actix_rt::test]

@@ -742,7 +742,7 @@ async fn vector_filter_all_embedders() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -762,10 +762,9 @@ async fn vector_filter_all_embedders() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 4
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -840,7 +839,7 @@ async fn vector_filter_specific_embedder() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -860,10 +859,9 @@ async fn vector_filter_specific_embedder() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 4
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -876,7 +874,7 @@ async fn vector_filter_user_provided() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -887,10 +885,9 @@ async fn vector_filter_user_provided() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -903,7 +900,7 @@ async fn vector_filter_specific_fragment() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -917,10 +914,9 @@ async fn vector_filter_specific_fragment() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
|
||||
let (value, _code) = index
|
||||
.search_post(json!({
|
||||
@@ -928,7 +924,7 @@ async fn vector_filter_specific_fragment() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -945,10 +941,9 @@ async fn vector_filter_specific_fragment() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -981,17 +976,16 @@ async fn vector_filter_document_template_but_fragments_used() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1029,7 +1023,7 @@ async fn vector_filter_document_template() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1046,10 +1040,9 @@ async fn vector_filter_document_template() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1082,7 +1075,7 @@ async fn vector_filter_negation() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1099,10 +1092,9 @@ async fn vector_filter_negation() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1115,7 +1107,7 @@ async fn vector_filter_or_combination() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1132,10 +1124,9 @@ async fn vector_filter_or_combination() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -1148,7 +1139,7 @@ async fn vector_filter_regenerate() {
|
||||
"attributesToRetrieve": ["name"]
|
||||
}))
|
||||
.await;
|
||||
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(value, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1165,8 +1156,7 @@ async fn vector_filter_regenerate() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ async fn geo_bounding_box_with_string_and_number() {
|
||||
}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -63,8 +63,7 @@ async fn geo_bounding_box_with_string_and_number() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -85,7 +84,7 @@ async fn bug_4640() {
|
||||
}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -124,8 +123,7 @@ async fn bug_4640() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -149,7 +147,7 @@ async fn geo_asc_with_words() {
|
||||
&json!({"q": "jean"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -181,8 +179,7 @@ async fn geo_asc_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -195,7 +192,7 @@ async fn geo_asc_with_words() {
|
||||
&json!({"q": "bob"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -219,8 +216,7 @@ async fn geo_asc_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -233,7 +229,7 @@ async fn geo_asc_with_words() {
|
||||
&json!({"q": "intel"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -249,8 +245,7 @@ async fn geo_asc_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
},
|
||||
@@ -274,7 +269,7 @@ async fn geo_sort_with_words() {
|
||||
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{response}");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -309,8 +304,7 @@ async fn geo_sort_with_words() {
|
||||
"processingTimeMs": "[time]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
},
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use meili_snap::{json_string, snapshot};
|
||||
use meili_snap::snapshot;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use crate::common::index::Index;
|
||||
@@ -148,7 +148,7 @@ async fn simple_search() {
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -209,10 +209,9 @@ async fn simple_search() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]",
|
||||
"semanticHitCount": 0
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
snapshot!(response["semanticHitCount"], @"0");
|
||||
|
||||
let (response, code) = index
|
||||
@@ -221,7 +220,7 @@ async fn simple_search() {
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -285,10 +284,9 @@ async fn simple_search() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]",
|
||||
"semanticHitCount": 2
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
snapshot!(response["semanticHitCount"], @"2");
|
||||
|
||||
let (response, code) = index
|
||||
@@ -297,7 +295,7 @@ async fn simple_search() {
|
||||
)
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -361,10 +359,9 @@ async fn simple_search() {
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]",
|
||||
"semanticHitCount": 3
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
snapshot!(response["semanticHitCount"], @"3");
|
||||
}
|
||||
|
||||
|
||||
@@ -104,7 +104,7 @@ async fn simple_search() {
|
||||
// english
|
||||
index
|
||||
.search(json!({"q": "Atta", "attributesToRetrieve": ["id"]}), |response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -115,8 +115,7 @@ async fn simple_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -126,7 +125,7 @@ async fn simple_search() {
|
||||
// japanese
|
||||
index
|
||||
.search(json!({"q": "進撃", "attributesToRetrieve": ["id"]}), |response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -137,8 +136,7 @@ async fn simple_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -149,7 +147,7 @@ async fn simple_search() {
|
||||
.search(
|
||||
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -160,10 +158,9 @@ async fn simple_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
snapshot!(code, @"200 OK");
|
||||
},
|
||||
)
|
||||
@@ -172,7 +169,7 @@ async fn simple_search() {
|
||||
// chinese
|
||||
index
|
||||
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r#"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -183,10 +180,9 @@ async fn simple_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
snapshot!(code, @"200 OK");
|
||||
})
|
||||
.await;
|
||||
@@ -226,7 +222,7 @@ async fn force_locales() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -237,8 +233,7 @@ async fn force_locales() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -251,7 +246,7 @@ async fn force_locales() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -262,8 +257,7 @@ async fn force_locales() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -306,7 +300,7 @@ async fn force_locales_with_pattern() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -317,8 +311,7 @@ async fn force_locales_with_pattern() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -331,7 +324,7 @@ async fn force_locales_with_pattern() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -342,8 +335,7 @@ async fn force_locales_with_pattern() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -384,15 +376,14 @@ async fn force_locales_with_pattern_nested() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -405,7 +396,7 @@ async fn force_locales_with_pattern_nested() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -416,8 +407,7 @@ async fn force_locales_with_pattern_nested() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -461,15 +451,14 @@ async fn force_different_locales_with_pattern() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -482,7 +471,7 @@ async fn force_different_locales_with_pattern() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -493,8 +482,7 @@ async fn force_different_locales_with_pattern() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -541,15 +529,14 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -562,7 +549,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"], "attributesToSearchOn": ["name_zh", "description_zh"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -573,8 +560,7 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -616,7 +602,7 @@ async fn auto_infer_locales_at_search() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -627,8 +613,7 @@ async fn auto_infer_locales_at_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -640,21 +625,20 @@ async fn auto_infer_locales_at_search() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
"id": 853
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
|
||||
},
|
||||
)
|
||||
@@ -664,7 +648,7 @@ async fn auto_infer_locales_at_search() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -675,8 +659,7 @@ async fn auto_infer_locales_at_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -719,15 +702,14 @@ async fn force_different_locales_with_pattern_nested() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -740,21 +722,20 @@ async fn force_different_locales_with_pattern_nested() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
"id": 852
}
],
"query": "\"进击的巨人\"",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
|
||||
},
|
||||
)
|
||||
@@ -765,7 +746,7 @@ async fn force_different_locales_with_pattern_nested() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["ja"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -776,8 +757,7 @@ async fn force_different_locales_with_pattern_nested() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -819,15 +799,14 @@ async fn settings_change() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -840,15 +819,14 @@ async fn settings_change() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -884,15 +862,14 @@ async fn settings_change() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["cmn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -905,15 +882,14 @@ async fn settings_change() {
|
||||
.search(
|
||||
json!({"q": "\"进击的巨人\"", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"query": "\"进击的巨人\"",
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -1188,7 +1164,7 @@ async fn swedish_search() {
|
||||
// infer swedish
|
||||
index
|
||||
.search(json!({"q": "trä", "attributesToRetrieve": ["product"]}), |response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1202,8 +1178,7 @@ async fn swedish_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -1212,7 +1187,7 @@ async fn swedish_search() {
|
||||
|
||||
index
|
||||
.search(json!({"q": "tra", "attributesToRetrieve": ["product"]}), |response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1226,8 +1201,7 @@ async fn swedish_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -1239,7 +1213,7 @@ async fn swedish_search() {
|
||||
.search(
|
||||
json!({"q": "trä", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1253,8 +1227,7 @@ async fn swedish_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -1265,7 +1238,7 @@ async fn swedish_search() {
|
||||
.search(
|
||||
json!({"q": "tra", "locales": ["swe"], "attributesToRetrieve": ["product"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1279,8 +1252,7 @@ async fn swedish_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
@@ -1315,21 +1287,20 @@ async fn german_search() {
|
||||
.search(
|
||||
json!({"q": "kulturalität", "attributesToRetrieve": ["product"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(response, @r###"
{
"hits": [
{
"product": "Interkulturalität"
}
],
"query": "kulturalität",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}
"###);
snapshot!(code, @"200 OK");
|
||||
},
|
||||
)
|
||||
@@ -1339,7 +1310,7 @@ async fn german_search() {
|
||||
.search(
|
||||
json!({"q": "organisation", "attributesToRetrieve": ["product"]}),
|
||||
|response, code| {
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1350,8 +1321,7 @@ async fn german_search() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
snapshot!(code, @"200 OK");
|
||||
|
||||
@@ -1044,7 +1044,7 @@ async fn test_degraded_score_details() {
|
||||
}),
|
||||
|response, code| {
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1103,8 +1103,7 @@ async fn test_degraded_score_details() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
},
|
||||
|
||||
@@ -93,14 +93,13 @@ async fn federation_empty_list() {
|
||||
|
||||
let (response, code) = server.multi_search(json!({"federation": {}, "queries": []})).await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 0,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 0
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -165,7 +164,7 @@ async fn simple_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
[
|
||||
{
|
||||
"indexUid": "SHARED_DOCUMENTS",
|
||||
@@ -183,8 +182,7 @@ async fn simple_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
},
|
||||
{
|
||||
"indexUid": "SHARED_DOCUMENTS",
|
||||
@@ -202,8 +200,7 @@ async fn simple_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
]
|
||||
"###);
|
||||
@@ -220,7 +217,7 @@ async fn federation_single_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -240,8 +237,7 @@ async fn federation_single_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -260,7 +256,7 @@ async fn federation_multiple_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -312,8 +308,7 @@ async fn federation_multiple_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 5,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 5
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -330,7 +325,7 @@ async fn federation_two_search_single_index() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -363,8 +358,7 @@ async fn federation_two_search_single_index() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -463,7 +457,7 @@ async fn simple_search_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response["results"], { ".**.processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
[
|
||||
{
|
||||
"indexUid": "SHARED_DOCUMENTS",
|
||||
@@ -481,8 +475,7 @@ async fn simple_search_two_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 1,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 1
|
||||
},
|
||||
{
|
||||
"indexUid": "SHARED_NESTED_DOCUMENTS",
|
||||
@@ -523,8 +516,7 @@ async fn simple_search_two_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 2,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 2
|
||||
}
|
||||
]
|
||||
"###);
|
||||
@@ -543,7 +535,7 @@ async fn federation_two_search_two_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -604,8 +596,7 @@ async fn federation_two_search_two_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -635,7 +626,7 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -804,8 +795,7 @@ async fn federation_multiple_search_multiple_indexes() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1111,7 +1101,7 @@ async fn federation_filter() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(response, @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1150,8 +1140,7 @@ async fn federation_filter() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1188,7 +1177,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1277,8 +1266,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 4
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1290,7 +1278,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1365,8 +1353,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1462,7 +1449,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1551,8 +1538,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 4,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 4
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1565,7 +1551,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1640,8 +1626,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1719,7 +1704,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1846,8 +1831,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 10,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 10
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1860,7 +1844,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -1931,8 +1915,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 6,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 6
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1953,7 +1936,7 @@ async fn federation_sort_different_ranking_rules() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2080,8 +2063,7 @@ async fn federation_sort_different_ranking_rules() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 10,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 10
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -2160,7 +2142,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2287,8 +2269,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 10,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 10
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -2301,7 +2282,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2372,8 +2353,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 6,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 6
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2444,7 +2424,7 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2547,8 +2527,7 @@ async fn federation_limit_offset() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2570,7 +2549,7 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2585,8 +2564,7 @@ async fn federation_limit_offset() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 1,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2608,7 +2586,7 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2695,8 +2673,7 @@ async fn federation_limit_offset() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 2,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2718,14 +2695,13 @@ async fn federation_limit_offset() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 12,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2755,7 +2731,7 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2885,8 +2861,7 @@ async fn federation_formatting() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2908,7 +2883,7 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -2923,8 +2898,7 @@ async fn federation_formatting() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 1,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -2946,7 +2920,7 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3033,8 +3007,7 @@ async fn federation_formatting() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 2,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3056,14 +3029,13 @@ async fn federation_formatting() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
|
||||
{
|
||||
"hits": [],
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 12,
|
||||
"estimatedTotalHits": 12,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 12
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3126,7 +3098,7 @@ async fn federation_null_weight() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3165,8 +3137,7 @@ async fn federation_null_weight() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -3273,7 +3244,7 @@ async fn federation_federated_contains_facets() {
|
||||
]}))
|
||||
.await;
|
||||
snapshot!(code, @"200 OK");
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
|
||||
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
|
||||
{
|
||||
"hits": [
|
||||
{
|
||||
@@ -3309,8 +3280,7 @@ async fn federation_federated_contains_facets() {
|
||||
"processingTimeMs": "[duration]",
|
||||
"limit": 20,
|
||||
"offset": 0,
|
||||
"estimatedTotalHits": 3,
|
||||
"requestUid": "[uuid]"
|
||||
"estimatedTotalHits": 3
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -3518,7 +3488,7 @@ async fn federation_vector_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3562,8 +3532,7 @@ async fn federation_vector_single_index() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"semanticHitCount": 4,
"requestUid": "[uuid]"
"semanticHitCount": 4
}
"###);

@@ -3576,7 +3545,7 @@ async fn federation_vector_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3620,8 +3589,7 @@ async fn federation_vector_single_index() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"semanticHitCount": 4,
"requestUid": "[uuid]"
"semanticHitCount": 4
}
"###);

@@ -3635,7 +3603,7 @@ async fn federation_vector_single_index() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r###"
{
"hits": [
{
@@ -3683,8 +3651,7 @@ async fn federation_vector_single_index() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"semanticHitCount": 3,
"requestUid": "[uuid]"
"semanticHitCount": 3
}
"###);
}
@@ -3736,7 +3703,7 @@ async fn federation_vector_two_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
{
"hits": [
{
@@ -3955,10 +3922,9 @@ async fn federation_vector_two_indexes() {
0.6
]
},
"semanticHitCount": 6,
"requestUid": "[uuid]"
"semanticHitCount": 6
}
"###);
"#);

// hybrid search, distinct embedder
let (response, code) = server
@@ -3968,7 +3934,7 @@ async fn federation_vector_two_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]", ".**.requestUid" => "[uuid]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".**._rankingScore" => "[score]" }), @r#"
{
"hits": [
{
@@ -4195,8 +4161,7 @@ async fn federation_vector_two_indexes() {
0.6
]
},
"semanticHitCount": 8,
"requestUid": "[uuid]"
"semanticHitCount": 8
}
"#);
}
@@ -4244,7 +4209,7 @@ async fn federation_facets_different_indexes_same_facet() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4415,8 +4380,7 @@ async fn federation_facets_different_indexes_same_facet() {
},
"stats": {}
}
},
"requestUid": "[uuid]"
}
}
"###);

@@ -4435,7 +4399,7 @@ async fn federation_facets_different_indexes_same_facet() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4577,8 +4541,7 @@ async fn federation_facets_different_indexes_same_facet() {
"Shazam!": 1
}
},
"facetStats": {},
"requestUid": "[uuid]"
"facetStats": {}
}
"###);

@@ -4598,7 +4561,7 @@ async fn federation_facets_different_indexes_same_facet() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4723,8 +4686,7 @@ async fn federation_facets_different_indexes_same_facet() {
"distribution": {},
"stats": {}
}
},
"requestUid": "[uuid]"
}
}
"###);
}
@@ -4786,7 +4748,7 @@ async fn federation_facets_same_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4844,8 +4806,7 @@ async fn federation_facets_same_indexes() {
}
}
}
},
"requestUid": "[uuid]"
}
}
"###);

@@ -4861,7 +4822,7 @@ async fn federation_facets_same_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -4947,8 +4908,7 @@ async fn federation_facets_same_indexes() {
}
}
}
},
"requestUid": "[uuid]"
}
}
"###);

@@ -4965,7 +4925,7 @@ async fn federation_facets_same_indexes() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -5027,8 +4987,7 @@ async fn federation_facets_same_indexes() {
"min": 2.0,
"max": 6.0
}
},
"requestUid": "[uuid]"
}
}
"###);
}
@@ -5081,7 +5040,7 @@ async fn federation_inconsistent_merge_order() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -5258,8 +5217,7 @@ async fn federation_inconsistent_merge_order() {
},
"stats": {}
}
},
"requestUid": "[uuid]"
}
}
"###);

@@ -5306,7 +5264,7 @@ async fn federation_inconsistent_merge_order() {
]}))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###"
{
"hits": [
{
@@ -5446,8 +5404,7 @@ async fn federation_inconsistent_merge_order() {
"Batman Returns": 1
}
},
"facetStats": {},
"requestUid": "[uuid]"
"facetStats": {}
}
"###);
}

@@ -229,7 +229,7 @@ async fn remote_sharding() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -287,13 +287,12 @@ async fn remote_sharding() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -351,13 +350,12 @@ async fn remote_sharding() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms2.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -415,7 +413,6 @@ async fn remote_sharding() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 5,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -597,7 +594,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -622,10 +619,9 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
"#);

// multi vector search: two local queries, one remote

@@ -673,7 +669,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -698,7 +694,6 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
@@ -749,7 +744,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r#"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -774,7 +769,6 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"#);
@@ -825,7 +819,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -845,10 +839,9 @@ async fn remote_sharding_retrieve_vectors() {
]
},
"semanticHitCount": 0,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
"#);

// multi vector search: no local queries, all remote

@@ -896,7 +889,7 @@ async fn remote_sharding_retrieve_vectors() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [],
"processingTimeMs": "[time]",
@@ -920,10 +913,9 @@ async fn remote_sharding_retrieve_vectors() {
0.2
]
},
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
"#);
}

#[actix_rt::test]
@@ -1141,7 +1133,7 @@ async fn error_no_weighted_score() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1169,7 +1161,6 @@ async fn error_no_weighted_score() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote hit does not contain `._federation.weightedScoreValues`\n - hint: check that the remote instance is a Meilisearch instance running the same version",
@@ -1281,7 +1272,7 @@ async fn error_bad_response() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1309,7 +1300,6 @@ async fn error_bad_response() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "could not parse response from the remote host as a federated search response:\n - response from remote: <html>Returning an HTML page</html>\n - hint: check that the remote instance is a Meilisearch instance running the same version",
@@ -1414,7 +1404,7 @@ async fn error_bad_request() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1442,7 +1432,6 @@ async fn error_bad_request() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.queries[1]`: Index `nottest` not found.\",\"code\":\"index_not_found\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#index_not_found\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -1552,7 +1541,7 @@ async fn error_bad_request_facets_by_index() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1586,7 +1575,6 @@ async fn error_bad_request_facets_by_index() {
"stats": {}
}
},
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test0`: Index `test0` not found.\\n - Note: index `test0` is not used in queries\",\"code\":\"index_not_found\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#index_not_found\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -1699,7 +1687,7 @@ async fn error_bad_request_facets_by_index_facet() {
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");

snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -1738,7 +1726,6 @@ async fn error_bad_request_facets_by_index_facet() {
"stats": {}
}
},
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution: Attribute `id` is not filterable. This index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -2048,7 +2035,7 @@ async fn error_remote_404() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2076,7 +2063,6 @@ async fn error_remote_404() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 404:\n - response from remote: null\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
@@ -2089,7 +2075,7 @@ async fn error_remote_404() {
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2127,7 +2113,6 @@ async fn error_remote_404() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2259,7 +2244,7 @@ async fn error_remote_sharding_auth() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2287,7 +2272,6 @@ async fn error_remote_sharding_auth() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1-notsearch": {
"message": "could not authenticate against the remote host\n - hint: check that the remote instance was registered with a valid API key having the `search` action",
@@ -2421,7 +2405,7 @@ async fn remote_sharding_auth() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2469,7 +2453,6 @@ async fn remote_sharding_auth() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2572,7 +2555,7 @@ async fn error_remote_500() {

let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2600,7 +2583,6 @@ async fn error_remote_500() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"requestUid": "[uuid]",
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 500:\n - response from remote: {\"error\":\"provoked error\",\"code\":\"test_error\",\"link\":\"https://docs.meilisearch.com/errors#test_error\"}",
@@ -2614,7 +2596,7 @@ async fn error_remote_500() {
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
// the response is full because we queried the instance that works
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2652,7 +2634,6 @@ async fn error_remote_500() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
@@ -2756,7 +2737,7 @@ async fn error_remote_500_once() {
// Meilisearch is tolerant to a single failure
let (response, _status_code) = ms0.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2794,13 +2775,12 @@ async fn error_remote_500_once() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
{
@@ -2838,7 +2818,6 @@ async fn error_remote_500_once() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 3,
"requestUid": "[uuid]",
"remoteErrors": {}
}
"###);

@@ -175,8 +175,7 @@ async fn test_issue_5274() {

snapshot!(json_string!(rep, {
".processingTimeMs" => "[ignored]",
".requestUid" => "[uuid]"
}), @r###"
}), @r#"
{
"hits": [
{
@@ -190,8 +189,7 @@ async fn test_issue_5274() {
"hitsPerPage": 1,
"page": 1,
"totalPages": 1,
"totalHits": 1,
"requestUid": "[uuid]"
"totalHits": 1
}
"###);
"#);
}

@@ -97,7 +97,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -108,7 +108,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
@@ -119,7 +119,7 @@ async fn task_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`, `indexCompaction`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

@@ -1,5 +1,6 @@
---
source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
snapshot_kind: text
---
{
"hits": [
@@ -20,6 +21,5 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]"
"estimatedTotalHits": 1
}

@@ -294,7 +294,7 @@ async fn check_the_index_features(server: &Server) {

let (results, _status) =
kefir.search_post(json!({ "sort": ["age:asc"], "filter": "surname = kefirounet" })).await;
snapshot!(json_string!(results, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), name: "search_with_sort_and_filter");
snapshot!(results, name: "search_with_sort_and_filter");

// ensuring we can get the vectors and their `regenerate` is still good.
let (results, _status) = kefir.search_post(json!({"retrieveVectors": true})).await;

@@ -323,7 +323,7 @@ async fn binary_quantize_clear_documents() {
// Make sure the vector DB has been cleared
let (documents, _code) =
index.search_post(json!({ "hybrid": { "embedder": "manual" }, "vector": [1, 1, 1] })).await;
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(documents, @r#"
{
"hits": [],
"query": "",
@@ -331,10 +331,9 @@ async fn binary_quantize_clear_documents() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"###);
"#);
}

#[actix_rt::test]

@@ -257,7 +257,7 @@ async fn search_with_vector() {
json!({"vector": [1.0, 1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "rest"}, "limit": 1}
)).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -270,10 +270,9 @@ async fn search_with_vector() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
}

#[actix_rt::test]
@@ -289,7 +288,7 @@ async fn search_with_media() {
))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -303,10 +302,9 @@ async fn search_with_media() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
}

#[actix_rt::test]
@@ -392,7 +390,7 @@ async fn search_with_query() {
))
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -406,10 +404,9 @@ async fn search_with_query() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);
}

#[actix_rt::test]
@@ -2079,7 +2076,7 @@ async fn composite() {
json!({"vector": [1.0, 1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "rest"}, "limit": 1}
)).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".requestUid" => "[uuid]", ".processingTimeMs" => "[duration]" }), @r###"
snapshot!(value, @r#"
{
"hits": [
{
@@ -2092,10 +2089,9 @@ async fn composite() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);

let (value, code) = index
.search_post(
@@ -2104,7 +2100,7 @@ async fn composite() {
)
.await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(value, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r#"
snapshot!(value, @r#"
{
"hits": [
{
@@ -2118,7 +2114,6 @@ async fn composite() {
"limit": 1,
"offset": 0,
"estimatedTotalHits": 4,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"#);

@@ -689,7 +689,7 @@ async fn clear_documents() {
// Make sure the vector DB has been cleared
let (documents, _code) =
index.search_post(json!({ "vector": [1, 1, 1], "hybrid": {"embedder": "manual"} })).await;
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(documents, @r#"
{
"hits": [],
"query": "",
@@ -697,10 +697,9 @@ async fn clear_documents() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"requestUid": "[uuid]",
"semanticHitCount": 0
}
"###);
"#);
}

#[actix_rt::test]
@@ -744,7 +743,7 @@ async fn add_remove_one_vector_4588() {
json!({"vector": [1, 1, 1], "hybrid": {"semanticRatio": 1.0, "embedder": "manual"} }),
)
.await;
snapshot!(json_string!(documents, { ".processingTimeMs" => "[duration]", ".requestUid" => "[uuid]" }), @r###"
snapshot!(documents, @r#"
{
"hits": [
{
@@ -757,10 +756,9 @@ async fn add_remove_one_vector_4588() {
"limit": 20,
"offset": 0,
"estimatedTotalHits": 1,
"requestUid": "[uuid]",
"semanticHitCount": 1
}
"###);
"#);

let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })

@@ -126,7 +126,7 @@ enum Command {
/// before running the copy and compaction. This way the current indexation must finish before
/// the compaction operation can start. Once the compaction is done, the big index is replaced
/// by the compacted one and the mutable transaction is released.
IndexCompaction { index_name: String },
CompactIndex { index_name: String },

/// Uses the hair dryer to make the dedicated pages hot in cache
///
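The doc comment above this command describes the compaction flow: take the index's single mutable transaction, which forces any running indexation to finish first, copy the environment with compaction enabled, swap the compacted file over the original, then release the transaction. A minimal sketch of that idea (not meilitool's actual code; it assumes heed exposes a copy-with-compaction call such as `Env::copy_to_file` with `CompactionOption::Enabled`, whose exact name can vary across heed versions) could look like this:

    use std::path::Path;

    // Hypothetical sketch of the compaction flow described in the comment above.
    fn compact_index_sketch(env: &heed::Env, compacted: &Path) -> anyhow::Result<()> {
        // Taking the write transaction blocks until the current indexation
        // finishes and keeps new writers out while the copy runs.
        let wtxn = env.write_txn()?;
        // Copy the LMDB environment to a new file, dropping free pages
        // (assumed API: heed's copy_to_file + CompactionOption::Enabled).
        env.copy_to_file(compacted, heed::CompactionOption::Enabled)?;
        // A real tool would atomically swap `compacted` over the original
        // data file here, then release the transaction so writers can resume.
        drop(wtxn);
        Ok(())
    }
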
@@ -165,7 +165,7 @@ fn main() -> anyhow::Result<()> {
let target_version = parse_version(&target_version).context("While parsing `--target-version`. Make sure `--target-version` is in the format MAJOR.MINOR.PATCH")?;
OfflineUpgrade { db_path, current_version: detected_version, target_version }.upgrade()
}
Command::IndexCompaction { index_name } => compact_index(db_path, &index_name),
Command::CompactIndex { index_name } => compact_index(db_path, &index_name),
Command::HairDryer { index_name, index_part } => {
hair_dryer(db_path, &index_name, &index_part)
}