Compare commits


3 Commits

Author           SHA1        Message                                       Date
Clément Renault  2495058a6e  Fix the tests                                 2023-11-15 10:21:30 +01:00
Kerollmops       4cfb48fbb6  Make the basic ranking rule boosting work     2023-11-08 14:47:35 +01:00
Kerollmops       67dc0268c5  Rename Criterion/Criteria into RankingRules   2023-11-08 11:22:59 +01:00

49 changed files with 465 additions and 694 deletions

View File

@@ -90,8 +90,7 @@ jobs:
set -x
export base_ref=$(git merge-base origin/main ${{ steps.comment-branch.outputs.head_ref }} | head -c8)
export base_filename=$(echo ${{ steps.command.outputs.command-arguments }}_main_${base_ref}.json)
export bench_name=$(echo ${{ steps.command.outputs.command-arguments }})
echo "Here are your $bench_name benchmarks diff 👊" >> body.txt
echo 'Here are your benchmarks diff 👊' >> body.txt
echo '```' >> body.txt
./benchmarks/scripts/compare.sh $base_filename ${{ steps.file.outputs.basename }}.json >> body.txt
echo '```' >> body.txt

Cargo.lock (generated, 21 lines changed)
View File

@@ -231,9 +231,9 @@ dependencies = [
[[package]]
name = "addr2line"
version = "0.21.0"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
dependencies = [
"gimli",
]
@@ -435,9 +435,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.69"
version = "0.3.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12"
dependencies = [
"addr2line",
"cc",
@@ -1638,9 +1638,9 @@ dependencies = [
[[package]]
name = "gimli"
version = "0.28.0"
version = "0.27.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
[[package]]
name = "git2"
@@ -1894,7 +1894,6 @@ name = "index-scheduler"
version = "1.4.1"
dependencies = [
"anyhow",
"backtrace",
"big_s",
"bincode",
"crossbeam",
@@ -2857,9 +2856,9 @@ dependencies = [
[[package]]
name = "object"
version = "0.32.1"
version = "0.31.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0"
checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
dependencies = [
"memchr",
]
@@ -3641,9 +3640,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.108"
version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c"
dependencies = [
"indexmap 2.0.0",
"itoa",

View File

@@ -28,7 +28,6 @@ license = "MIT"
[profile.release]
codegen-units = 1
debug = true
[profile.dev.package.flate2]
opt-level = 3

View File

@@ -12,7 +12,7 @@ use milli::heed::EnvOpenOptions;
use milli::update::{
IndexDocuments, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig, Settings,
};
use milli::{Criterion, Filter, Index, Object, TermsMatchingStrategy};
use milli::{Filter, Index, Object, RankingRule, TermsMatchingStrategy};
use serde_json::Value;
pub struct Conf<'a> {
@@ -78,11 +78,11 @@ pub fn base_setup(conf: &Conf) -> Index {
if let Some(criterion) = conf.criterion {
builder.reset_filterable_fields();
builder.reset_criteria();
builder.reset_ranking_rules();
builder.reset_stop_words();
let criterion = criterion.iter().map(|s| Criterion::from_str(s).unwrap()).collect();
builder.set_criteria(criterion);
let criterion = criterion.iter().map(|s| RankingRule::from_str(s).unwrap()).collect();
builder.set_ranking_rules(criterion);
}
(conf.configure)(&mut builder);

View File

@@ -12,7 +12,6 @@ license.workspace = true
[dependencies]
anyhow = "1.0.70"
backtrace = "0.3.69"
bincode = "1.3.3"
csv = "1.2.1"
derive_builder = "0.12.0"

View File

@@ -825,10 +825,6 @@ impl IndexScheduler {
// 2. dump the tasks
let mut dump_tasks = dump.create_tasks_queue()?;
for ret in self.all_tasks.iter(&rtxn)? {
if self.must_stop_processing.get() {
return Err(Error::AbortedTask);
}
let (_, mut t) = ret?;
let status = t.status;
let content_file = t.content_uuid();
@@ -849,9 +845,6 @@ impl IndexScheduler {
// 2.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
if let Some(content_file) = content_file {
if self.must_stop_processing.get() {
return Err(Error::AbortedTask);
}
if status == Status::Enqueued {
let content_file = self.file_store.get_update(content_file)?;
@@ -891,9 +884,6 @@ impl IndexScheduler {
// 3.1. Dump the documents
for ret in index.all_documents(&rtxn)? {
if self.must_stop_processing.get() {
return Err(Error::AbortedTask);
}
let (_id, doc) = ret?;
let document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?;
index_dumper.push_document(&document)?;
@@ -913,9 +903,6 @@ impl IndexScheduler {
"[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
)).unwrap();
if self.must_stop_processing.get() {
return Err(Error::AbortedTask);
}
let path = self.dumps_path.join(format!("{}.dump", dump_uid));
let file = File::create(path)?;
dump.persist_to(BufWriter::new(file))?;

View File

@@ -108,8 +108,6 @@ pub enum Error {
TaskDeletionWithEmptyQuery,
#[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
TaskCancelationWithEmptyQuery,
#[error("Aborted task")]
AbortedTask,
#[error(transparent)]
Dump(#[from] dump::Error),
@@ -117,13 +115,8 @@ pub enum Error {
Heed(#[from] heed::Error),
#[error(transparent)]
Milli(#[from] milli::Error),
#[error("An unexpected crash occurred when processing the task. {}", {
match .0 {
Some(report) => format!("Get /reports/{}", report),
None => "No report was saved.".into(),
}
})]
ProcessBatchPanicked(Option<uuid::Uuid>),
#[error("An unexpected crash occurred when processing the task.")]
ProcessBatchPanicked,
#[error(transparent)]
FileStore(#[from] file_store::Error),
#[error(transparent)]
@@ -182,11 +175,10 @@ impl Error {
| Error::TaskNotFound(_)
| Error::TaskDeletionWithEmptyQuery
| Error::TaskCancelationWithEmptyQuery
| Error::AbortedTask
| Error::Dump(_)
| Error::Heed(_)
| Error::Milli(_)
| Error::ProcessBatchPanicked(_)
| Error::ProcessBatchPanicked
| Error::FileStore(_)
| Error::IoError(_)
| Error::Persist(_)
@@ -229,7 +221,7 @@ impl ErrorCode for Error {
Error::NoSpaceLeftInTaskQueue => Code::NoSpaceLeftOnDevice,
Error::Dump(e) => e.error_code(),
Error::Milli(e) => e.error_code(),
Error::ProcessBatchPanicked(_) => Code::Internal,
Error::ProcessBatchPanicked => Code::Internal,
Error::Heed(e) => e.error_code(),
Error::HeedTransaction(e) => e.error_code(),
Error::FileStore(e) => e.error_code(),
@@ -244,9 +236,6 @@ impl ErrorCode for Error {
Error::TaskDatabaseUpdate(_) => Code::Internal,
Error::CreateBatch(_) => Code::Internal,
// This one should never be seen by the end user
Error::AbortedTask => Code::Internal,
#[cfg(test)]
Error::PlannedFailure => Code::Internal,
}

View File

@@ -39,7 +39,6 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
test_breakpoint_sdr: _,
planned_failures: _,
run_loop_iteration: _,
panic_reader: _,
} = scheduler;
let rtxn = env.read_txn().unwrap();

View File

@@ -26,7 +26,6 @@ mod index_mapper;
#[cfg(test)]
mod insta_snapshot;
mod lru;
mod panic_hook;
mod utils;
mod uuid_codec;
@@ -54,8 +53,6 @@ use meilisearch_types::milli::documents::DocumentsBatchBuilder;
use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::{self, CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
use panic_hook::ReportReader;
pub use panic_hook::{Panic, Report, ReportRegistry};
use puffin::FrameView;
use roaring::RoaringBitmap;
use synchronoise::SignalEvent;
@@ -334,8 +331,6 @@ pub struct IndexScheduler {
/// The path to the version file of Meilisearch.
pub(crate) version_file_path: PathBuf,
pub(crate) panic_reader: ReportReader,
// ================= test
// The next entry is dedicated to the tests.
/// Provide a way to set a breakpoint in multiple part of the scheduler.
@@ -386,7 +381,6 @@ impl IndexScheduler {
#[cfg(test)]
run_loop_iteration: self.run_loop_iteration.clone(),
features: self.features.clone(),
panic_reader: self.panic_reader.clone(),
}
}
}
@@ -444,12 +438,6 @@ impl IndexScheduler {
let finished_at = env.create_database(&mut wtxn, Some(db_name::FINISHED_AT))?;
wtxn.commit()?;
const MAX_REPORT_COUNT: usize = 20;
let panic_reader = panic_hook::ReportReader::install_panic_hook(
std::num::NonZeroUsize::new(MAX_REPORT_COUNT).unwrap(),
);
// allow unreachable_code to get rids of the warning in the case of a test build.
let this = Self {
must_stop_processing: MustStopProcessing::default(),
@@ -490,7 +478,6 @@ impl IndexScheduler {
#[cfg(test)]
run_loop_iteration: Arc::new(RwLock::new(0)),
features,
panic_reader,
};
this.run();
@@ -1143,10 +1130,7 @@ impl IndexScheduler {
.name(String::from("batch-operation"))
.spawn(move || cloned_index_scheduler.process_batch(batch))
.unwrap();
self.panic_reader
.join_thread(handle)
.unwrap_or_else(|maybe_report| Err(Error::ProcessBatchPanicked(maybe_report)))
handle.join().unwrap_or(Err(Error::ProcessBatchPanicked))
};
#[cfg(test)]
@@ -1183,8 +1167,7 @@ impl IndexScheduler {
// If we have an abortion error we must stop the tick here and re-schedule tasks.
Err(Error::Milli(milli::Error::InternalError(
milli::InternalError::AbortedIndexation,
)))
| Err(Error::AbortedTask) => {
))) => {
#[cfg(test)]
self.breakpoint(Breakpoint::AbortedIndexation);
wtxn.abort().map_err(Error::HeedTransaction)?;
@@ -1327,10 +1310,6 @@ impl IndexScheduler {
}
}
pub fn reports(&self) -> Arc<RwLock<ReportRegistry>> {
self.panic_reader.registry()
}
/// Blocks the thread until the test handle asks to progress to/through this breakpoint.
///
/// Two messages are sent through the channel for each breakpoint.
@@ -4344,26 +4323,4 @@ mod tests {
}
"###);
}
#[test]
fn cancel_processing_dump() {
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
let dump_creation = KindWithContent::DumpCreation { keys: Vec::new(), instance_uid: None };
let dump_cancellation = KindWithContent::TaskCancelation {
query: "cancel dump".to_owned(),
tasks: RoaringBitmap::from_iter([0]),
};
let _ = index_scheduler.register(dump_creation).unwrap();
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_dump_register");
handle.advance_till([Start, BatchCreated, InsideProcessBatch]);
let _ = index_scheduler.register(dump_cancellation).unwrap();
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_registered");
snapshot!(format!("{:?}", handle.advance()), @"AbortedIndexation");
handle.advance_one_successful_batch();
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_processed");
}
}
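
With the panic-report plumbing removed above, a panicking batch thread now surfaces as a plain error through `JoinHandle::join`. A minimal standalone sketch of that pattern, using a stand-in error type rather than the scheduler's own:

```rust
// Stand-in for index_scheduler::Error::ProcessBatchPanicked; illustrative only.
#[derive(Debug)]
enum Error {
    ProcessBatchPanicked,
}

fn main() {
    let handle = std::thread::spawn(|| -> Result<(), Error> {
        panic!("simulated panic inside process_batch");
    });
    // `join` returns Err(_) when the spawned thread panicked; map that case to a
    // regular error value instead of inspecting the panic payload.
    let result = handle.join().unwrap_or(Err(Error::ProcessBatchPanicked));
    assert!(matches!(result, Err(Error::ProcessBatchPanicked)));
}
```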

View File

@@ -1,211 +0,0 @@
//! Panic hook designed to fetch a panic from a subthread and recover it on join.
use std::collections::VecDeque;
use std::num::NonZeroUsize;
use std::panic::PanicInfo;
use std::sync::{Arc, RwLock};
use std::thread::{JoinHandle, ThreadId};
use backtrace::Backtrace;
// Represents a panic in a shallowy structured fashion
pub struct Panic {
pub payload: Option<String>,
pub location: Option<String>,
pub thread_name: Option<String>,
pub thread_id: ThreadId,
pub backtrace: Backtrace,
}
/// A panic enriched with a unique id
#[derive(serde::Serialize)]
pub struct Report {
pub id: uuid::Uuid,
#[serde(serialize_with = "serialize_panic")]
pub panic: Panic,
}
fn serialize_panic<S>(panic: &Panic, s: S) -> std::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
use serde::Serialize;
panic.to_json().serialize(s)
}
impl Report {
pub fn new(panic: Panic) -> Self {
Self { id: uuid::Uuid::new_v4(), panic }
}
}
impl Panic {
pub fn to_json(&self) -> serde_json::Value {
json::panic_to_json(self)
}
}
mod json {
use backtrace::{Backtrace, BacktraceFrame, BacktraceSymbol};
use serde_json::{json, Value};
use super::Panic;
fn symbol_to_json(symbol: &BacktraceSymbol) -> Value {
let address = symbol.addr().map(|addr| format!("{:p}", addr));
let column = symbol.colno();
let line = symbol.lineno();
let function = symbol.name().map(|name| name.to_string());
let filename = symbol.filename();
json!({
"function": function,
"filename": filename,
"line": line,
"column": column,
"address": address,
})
}
fn frame_to_json(frame: &BacktraceFrame) -> Value {
let symbols: Vec<_> = frame.symbols().iter().map(symbol_to_json).collect();
match symbols.as_slice() {
[] => {
let address = format!("{:p}", frame.ip());
json!({"address": address})
}
[symbol] => json!(symbol),
symbols => json!(symbols),
}
}
fn backtrace_to_json(backtrace: &Backtrace) -> Value {
let frames: Vec<_> = backtrace.frames().iter().map(frame_to_json).collect();
json!(frames)
}
pub fn panic_to_json(panic: &Panic) -> Value {
let thread_id = format!("{:?}", panic.thread_id);
serde_json::json!({
"payload": panic.payload,
"location": panic.location,
"thread": {
"id": thread_id,
"name": panic.thread_name,
},
"backtrace": backtrace_to_json(&panic.backtrace),
})
}
}
struct ReportWriter(Arc<RwLock<ReportRegistry>>);
/// A FIFO queue of reports.
pub struct ReportRegistry {
reports: std::collections::VecDeque<Report>,
}
impl ReportRegistry {
pub fn new(capacity: NonZeroUsize) -> Self {
Self { reports: VecDeque::with_capacity(capacity.get()) }
}
pub fn push(&mut self, report: Report) -> Option<Report> {
let popped = if self.reports.len() == self.reports.capacity() {
self.reports.pop_back()
} else {
None
};
self.reports.push_front(report);
popped
}
pub fn iter(&self) -> impl Iterator<Item = &Report> {
self.reports.iter()
}
pub fn find(&self, report_id: uuid::Uuid) -> Option<&Report> {
self.iter().find(|report| report.id == report_id)
}
}
impl ReportWriter {
#[track_caller]
fn write_panic(&self, panic_info: &PanicInfo<'_>) {
let payload = panic_info
.payload()
.downcast_ref::<&str>()
.map(ToString::to_string)
.or_else(|| panic_info.payload().downcast_ref::<String>().cloned());
let location = panic_info.location().map(|loc| {
format!(
"{file}:{line}:{column}",
file = loc.file(),
line = loc.line(),
column = loc.column()
)
});
let thread_name = std::thread::current().name().map(ToString::to_string);
let thread_id = std::thread::current().id();
let backtrace = backtrace::Backtrace::new();
let panic = Panic { payload, location, thread_name, thread_id, backtrace };
let report = Report::new(panic);
log::error!(
"An unexpected panic occurred on thread {name} at {location}: {payload}. See report '{report}' for details.",
payload = report.panic.payload.as_deref().unwrap_or("Box<dyn Any>"),
name = report.panic.thread_name.as_deref().unwrap_or("<unnamed>"),
location = report.panic.location.as_deref().unwrap_or("<unknown>"),
report = report.id,
);
if let Ok(mut registry) = self.0.write() {
if let Some(old_report) = registry.push(report) {
log::trace!("Forgetting report {} to make space for new report.", old_report.id)
}
}
}
}
/// Reads the reports written in case of a panic.
#[derive(Clone)]
pub struct ReportReader(Arc<RwLock<ReportRegistry>>);
impl ReportReader {
/// Installs a new global panic hook, overriding any existing hook.
///
/// The hook writes any incoming panic in reports.
/// The reports can then be read by the returned [`ReportReader`].
pub fn install_panic_hook(capacity: NonZeroUsize) -> Self {
let registry = Arc::new(RwLock::new(ReportRegistry::new(capacity)));
let reader = ReportReader(registry.clone());
let writer = ReportWriter(registry.clone());
std::panic::set_hook(Box::new(move |panic_info| writer.write_panic(panic_info)));
reader
}
/// Join the thread corresponding to the passed handle, recovering either its value
/// or, in case the thread panicked, the id of the report corresponding to the panic.
///
/// The id can be used to read the report from the [`self.registry()`].
pub fn join_thread<T>(&self, thread: JoinHandle<T>) -> Result<T, Option<uuid::Uuid>> {
let thread_id = thread.thread().id();
thread.join().map_err(|_e| {
self.0
.read()
.unwrap()
.iter()
.find(|report| report.panic.thread_id == thread_id)
.map(|report| report.id)
})
}
/// Returns a registry that can be used to read the reports written during a panic.
pub fn registry(&self) -> Arc<RwLock<ReportRegistry>> {
self.0.clone()
}
}

View File

@@ -1,35 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
----------------------------------------------------------------------
### Status:
enqueued [0,]
----------------------------------------------------------------------
### Kind:
"dumpCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
----------------------------------------------------------------------
### Index Mapper:
----------------------------------------------------------------------
### Canceled By:
----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
----------------------------------------------------------------------

View File

@@ -1,45 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: canceled, canceled_by: 1, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_filter: "cancel dump" }, kind: TaskCancelation { query: "cancel dump", tasks: RoaringBitmap<[0]> }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [1,]
canceled [0,]
----------------------------------------------------------------------
### Kind:
"taskCancelation" [1,]
"dumpCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
----------------------------------------------------------------------
### Index Mapper:
----------------------------------------------------------------------
### Canceled By:
1 [0,]
----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
----------------------------------------------------------------------

View File

@@ -1,38 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[0,]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "cancel dump" }, kind: TaskCancelation { query: "cancel dump", tasks: RoaringBitmap<[0]> }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,]
----------------------------------------------------------------------
### Kind:
"taskCancelation" [1,]
"dumpCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
----------------------------------------------------------------------
### Index Mapper:
----------------------------------------------------------------------
### Canceled By:
----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
----------------------------------------------------------------------

View File

@@ -88,6 +88,7 @@ pub trait ErrorCode {
}
}
#[allow(clippy::enum_variant_names)]
enum ErrorType {
Internal,
InvalidRequest,
@@ -297,7 +298,6 @@ MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
ReportNotFound , InvalidRequest , NOT_FOUND ;
TaskNotFound , InvalidRequest , NOT_FOUND ;
TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
UnretrievableDocument , Internal , BAD_REQUEST ;

View File

@@ -9,7 +9,7 @@ use std::str::FromStr;
use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::update::Setting;
use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
use milli::{Index, RankingRule, RankingRuleError, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use crate::deserr::DeserrJsonError;
@@ -117,10 +117,10 @@ pub struct PaginationSettings {
pub max_total_hits: Setting<usize>,
}
impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRankingRules> {
impl MergeWithError<milli::RankingRuleError> for DeserrJsonError<InvalidSettingsRankingRules> {
fn merge(
_self_: Option<Self>,
other: milli::CriterionError,
other: milli::RankingRuleError,
merge_location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
Self::error::<Infallible>(
@@ -344,9 +344,9 @@ pub fn apply_settings_to_builder(
match settings.ranking_rules {
Setting::Set(ref criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
builder.set_ranking_rules(criteria.iter().map(|c| c.clone().into()).collect())
}
Setting::Reset => builder.reset_criteria(),
Setting::Reset => builder.reset_ranking_rules(),
Setting::NotSet => (),
}
@@ -578,11 +578,13 @@ pub fn settings(
}
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[deserr(try_from(&String) = FromStr::from_str -> CriterionError)]
#[deserr(try_from(&String) = FromStr::from_str -> RankingRuleError)]
pub enum RankingRuleView {
/// Sorted by decreasing number of matched query terms.
/// Query words at the front of an attribute is considered better than if it was at the back.
Words,
/// Sorted by documents matching the given filter and then documents not matching it.
Boost(String),
/// Sorted by increasing number of typos.
Typo,
/// Sorted by increasing distance between matched query terms.
@@ -605,7 +607,7 @@ impl Serialize for RankingRuleView {
where
S: Serializer,
{
serializer.serialize_str(&format!("{}", Criterion::from(self.clone())))
serializer.serialize_str(&format!("{}", RankingRule::from(self.clone())))
}
}
impl<'de> Deserialize<'de> for RankingRuleView {
@@ -623,7 +625,7 @@ impl<'de> Deserialize<'de> for RankingRuleView {
where
E: serde::de::Error,
{
let criterion = Criterion::from_str(v).map_err(|_| {
let criterion = RankingRule::from_str(v).map_err(|_| {
E::invalid_value(serde::de::Unexpected::Str(v), &"a valid ranking rule")
})?;
Ok(RankingRuleView::from(criterion))
@@ -633,42 +635,44 @@ impl<'de> Deserialize<'de> for RankingRuleView {
}
}
impl FromStr for RankingRuleView {
type Err = <Criterion as FromStr>::Err;
type Err = <RankingRule as FromStr>::Err;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(RankingRuleView::from(Criterion::from_str(s)?))
Ok(RankingRuleView::from(RankingRule::from_str(s)?))
}
}
impl fmt::Display for RankingRuleView {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fmt::Display::fmt(&Criterion::from(self.clone()), f)
fmt::Display::fmt(&RankingRule::from(self.clone()), f)
}
}
impl From<Criterion> for RankingRuleView {
fn from(value: Criterion) -> Self {
impl From<RankingRule> for RankingRuleView {
fn from(value: RankingRule) -> Self {
match value {
Criterion::Words => RankingRuleView::Words,
Criterion::Typo => RankingRuleView::Typo,
Criterion::Proximity => RankingRuleView::Proximity,
Criterion::Attribute => RankingRuleView::Attribute,
Criterion::Sort => RankingRuleView::Sort,
Criterion::Exactness => RankingRuleView::Exactness,
Criterion::Asc(x) => RankingRuleView::Asc(x),
Criterion::Desc(x) => RankingRuleView::Desc(x),
RankingRule::Words => RankingRuleView::Words,
RankingRule::Boost(filter) => RankingRuleView::Boost(filter),
RankingRule::Typo => RankingRuleView::Typo,
RankingRule::Proximity => RankingRuleView::Proximity,
RankingRule::Attribute => RankingRuleView::Attribute,
RankingRule::Sort => RankingRuleView::Sort,
RankingRule::Exactness => RankingRuleView::Exactness,
RankingRule::Asc(x) => RankingRuleView::Asc(x),
RankingRule::Desc(x) => RankingRuleView::Desc(x),
}
}
}
impl From<RankingRuleView> for Criterion {
impl From<RankingRuleView> for RankingRule {
fn from(value: RankingRuleView) -> Self {
match value {
RankingRuleView::Words => Criterion::Words,
RankingRuleView::Typo => Criterion::Typo,
RankingRuleView::Proximity => Criterion::Proximity,
RankingRuleView::Attribute => Criterion::Attribute,
RankingRuleView::Sort => Criterion::Sort,
RankingRuleView::Exactness => Criterion::Exactness,
RankingRuleView::Asc(x) => Criterion::Asc(x),
RankingRuleView::Desc(x) => Criterion::Desc(x),
RankingRuleView::Words => RankingRule::Words,
RankingRuleView::Boost(filter) => RankingRule::Boost(filter),
RankingRuleView::Typo => RankingRule::Typo,
RankingRuleView::Proximity => RankingRule::Proximity,
RankingRuleView::Attribute => RankingRule::Attribute,
RankingRuleView::Sort => RankingRule::Sort,
RankingRuleView::Exactness => RankingRule::Exactness,
RankingRuleView::Asc(x) => RankingRule::Asc(x),
RankingRuleView::Desc(x) => RankingRule::Desc(x),
}
}
}
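
Since `RankingRuleView` now carries a `Boost(String)` variant and deserializes through `FromStr`, a `rankingRules` setting could hypothetically include the new rule as a `boost:<filter>` string. A sketch of such a payload follows; the filter expression and field name are invented for illustration, and this diff does not document the final user-facing syntax:

```rust
// Requires the serde_json crate. The "boost:..." entry reuses the syntax parsed by
// RankingRule::from_str elsewhere in this diff; the filter itself is hypothetical.
fn main() {
    let ranking_rules = serde_json::json!([
        "words",
        "boost:is_promoted = true",
        "typo",
        "proximity",
        "attribute",
        "sort",
        "exactness"
    ]);
    println!("{ranking_rules}");
}
```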

View File

@@ -51,8 +51,6 @@ pub enum MeilisearchHttpError {
DocumentFormat(#[from] DocumentFormatError),
#[error(transparent)]
Join(#[from] JoinError),
#[error("Report `{0}` not found. Either its id is incorrect, or it was deleted. To save on memory, only a limited amount of reports are kept.")]
ReportNotFound(uuid::Uuid),
}
impl ErrorCode for MeilisearchHttpError {
@@ -76,7 +74,6 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::FileStore(_) => Code::Internal,
MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
MeilisearchHttpError::Join(_) => Code::Internal,
MeilisearchHttpError::ReportNotFound(_) => Code::ReportNotFound,
}
}
}

View File

@@ -24,7 +24,6 @@ pub mod features;
pub mod indexes;
mod metrics;
mod multi_search;
mod reports;
mod snapshot;
mod swap_indexes;
pub mod tasks;
@@ -41,8 +40,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/multi-search").configure(multi_search::configure))
.service(web::scope("/swap-indexes").configure(swap_indexes::configure))
.service(web::scope("/metrics").configure(metrics::configure))
.service(web::scope("/experimental-features").configure(features::configure))
.service(web::scope("/reports").configure(reports::configure));
.service(web::scope("/experimental-features").configure(features::configure));
}
#[derive(Debug, Serialize)]

View File

@@ -1,39 +0,0 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use index_scheduler::{IndexScheduler, Report};
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(list_reports))).service(
web::scope("/{report_uid}")
.service(web::resource("").route(web::get().to(SeqHandler(get_report)))),
);
}
pub async fn list_reports(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_ALL }>, Data<IndexScheduler>>,
) -> Result<HttpResponse, ResponseError> {
let reports = &index_scheduler.reports();
let reports = &reports.read().unwrap();
let reports: Vec<&Report> = reports.iter().collect();
Ok(HttpResponse::Ok().json(reports))
}
pub async fn get_report(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_ALL }>, Data<IndexScheduler>>,
report_id: web::Path<uuid::Uuid>,
) -> Result<HttpResponse, ResponseError> {
let reports = &index_scheduler.reports();
let reports = &reports.read().unwrap();
let report = reports
.find(*report_id)
.ok_or(crate::error::MeilisearchHttpError::ReportNotFound(*report_id))?;
Ok(HttpResponse::Ok().json(report))
}

View File

@@ -3,7 +3,7 @@ use heed::EnvOpenOptions;
// use maplit::hashset;
use milli::{
update::{IndexerConfig, Settings},
Criterion, Index,
Index, RankingRule,
};
fn main() {
@@ -19,13 +19,13 @@ fn main() {
// builder.set_min_word_len_one_typo(5);
// builder.set_min_word_len_two_typos(7);
// builder.set_sortable_fields(hashset! { S("release_date") });
builder.set_criteria(vec![
Criterion::Words,
Criterion::Typo,
Criterion::Proximity,
Criterion::Attribute,
Criterion::Sort,
Criterion::Exactness,
builder.set_ranking_rules(vec![
RankingRule::Words,
RankingRule::Typo,
RankingRule::Proximity,
RankingRule::Attribute,
RankingRule::Sort,
RankingRule::Exactness,
]);
builder.execute(|_| (), || false).unwrap();

View File

@@ -8,7 +8,7 @@ use thiserror::Error;
use crate::error::is_reserved_keyword;
use crate::search::facet::BadGeoError;
use crate::{CriterionError, Error, UserError};
use crate::{Error, RankingRuleError, UserError};
/// This error type is never supposed to be shown to the end user.
/// You must always cast it to a sort error or a criterion error.
@@ -28,23 +28,23 @@ impl From<BadGeoError> for AscDescError {
}
}
impl From<AscDescError> for CriterionError {
impl From<AscDescError> for RankingRuleError {
fn from(error: AscDescError) -> Self {
match error {
AscDescError::GeoError(_) => {
CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() }
RankingRuleError::ReservedNameForSort { name: "_geoPoint".to_string() }
}
AscDescError::InvalidSyntax { name } => CriterionError::InvalidName { name },
AscDescError::InvalidSyntax { name } => RankingRuleError::InvalidName { name },
AscDescError::ReservedKeyword { name } if name.starts_with("_geoPoint") => {
CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() }
RankingRuleError::ReservedNameForSort { name: "_geoPoint".to_string() }
}
AscDescError::ReservedKeyword { name } if name.starts_with("_geoRadius") => {
CriterionError::ReservedNameForFilter { name: "_geoRadius".to_string() }
RankingRuleError::ReservedNameForFilter { name: "_geoRadius".to_string() }
}
AscDescError::ReservedKeyword { name } if name.starts_with("_geoBoundingBox") => {
CriterionError::ReservedNameForFilter { name: "_geoBoundingBox".to_string() }
RankingRuleError::ReservedNameForFilter { name: "_geoBoundingBox".to_string() }
}
AscDescError::ReservedKeyword { name } => CriterionError::ReservedName { name },
AscDescError::ReservedKeyword { name } => RankingRuleError::ReservedName { name },
}
}
}

milli/src/boost.rs (new file, 38 lines added)
View File

@@ -0,0 +1,38 @@
//! This module provides the `Boost` type and defines all the errors related to this type.
use std::str::FromStr;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::RankingRuleError;
/// This error type is never supposed to be shown to the end user.
/// You must always cast it to a sort error or a criterion error.
#[derive(Error, Debug)]
pub enum BoostError {
#[error("Invalid syntax for the boost parameter: expected expression starting with `boost:`, found `{name}`.")]
InvalidSyntax { name: String },
}
impl From<BoostError> for RankingRuleError {
fn from(error: BoostError) -> Self {
match error {
BoostError::InvalidSyntax { name } => RankingRuleError::InvalidName { name },
}
}
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
pub struct Boost(pub String);
impl FromStr for Boost {
type Err = BoostError;
fn from_str(text: &str) -> Result<Boost, Self::Err> {
match text.split_once(':') {
Some(("boost", right)) => Ok(Boost(right.to_string())), // TODO check filter validity
_ => Err(BoostError::InvalidSyntax { name: text.to_string() }),
}
}
}
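
A self-contained sketch that mirrors the parsing rule above, since the real `Boost` type lives in a private module of milli; the filter expression is illustrative only:

```rust
// Mirrors Boost::from_str: everything after a leading `boost:` is kept as the raw
// filter string, and anything else is rejected. Filter validity is not checked yet,
// matching the TODO in the diff.
fn parse_boost(text: &str) -> Option<&str> {
    match text.split_once(':') {
        Some(("boost", filter)) => Some(filter),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_boost("boost:release_date > 1500"), Some("release_date > 1500"));
    assert_eq!(parse_boost("release_date:asc"), None); // handled by the asc/desc parser instead
}
```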

View File

@@ -9,7 +9,7 @@ use serde_json::Value;
use thiserror::Error;
use crate::documents::{self, DocumentsBatchCursorError};
use crate::{CriterionError, DocumentId, FieldId, Object, SortError};
use crate::{DocumentId, FieldId, Object, RankingRuleError, SortError};
pub fn is_reserved_keyword(keyword: &str) -> bool {
["_geo", "_geoDistance", "_geoPoint", "_geoRadius", "_geoBoundingBox"].contains(&keyword)
@@ -94,7 +94,7 @@ pub enum UserError {
#[error("A document cannot contain more than 65,535 fields.")]
AttributeLimitReached,
#[error(transparent)]
CriterionError(#[from] CriterionError),
CriterionError(#[from] RankingRuleError),
#[error("Maximum number of documents reached.")]
DocumentLimitReached,
#[error(
@@ -280,7 +280,7 @@ error_from_sub_error! {
ThreadPoolBuildError => InternalError,
SerializationError => InternalError,
GeoError => UserError,
CriterionError => UserError,
RankingRuleError => UserError,
}
impl<E> From<grenad::Error<E>> for Error

View File

@@ -25,10 +25,9 @@ use crate::heed_codec::{
};
use crate::readable_slices::ReadableSlices;
use crate::{
default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
FacetDistribution, FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec,
OrderBy, Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16,
BEU32,
default_criteria, CboRoaringBitmapCodec, DocumentId, ExternalDocumentsIds, FacetDistribution,
FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec, OrderBy, RankingRule,
Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32,
};
/// The HNSW data-structure that we serialize, fill and search in.
@@ -895,7 +894,7 @@ impl Index {
let distinct_field = self.distinct_field(rtxn)?;
let asc_desc_fields =
self.criteria(rtxn)?.into_iter().filter_map(|criterion| match criterion {
Criterion::Asc(field) | Criterion::Desc(field) => Some(field),
RankingRule::Asc(field) | RankingRule::Desc(field) => Some(field),
_otherwise => None,
});
@@ -1023,17 +1022,17 @@ impl Index {
pub(crate) fn put_criteria(
&self,
wtxn: &mut RwTxn,
criteria: &[Criterion],
criteria: &[RankingRule],
) -> heed::Result<()> {
self.main.put::<_, Str, SerdeJson<&[Criterion]>>(wtxn, main_key::CRITERIA_KEY, &criteria)
self.main.put::<_, Str, SerdeJson<&[RankingRule]>>(wtxn, main_key::CRITERIA_KEY, &criteria)
}
pub(crate) fn delete_criteria(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
self.main.delete::<_, Str>(wtxn, main_key::CRITERIA_KEY)
}
pub fn criteria(&self, rtxn: &RoTxn) -> heed::Result<Vec<Criterion>> {
match self.main.get::<_, Str, SerdeJson<Vec<Criterion>>>(rtxn, main_key::CRITERIA_KEY)? {
pub fn criteria(&self, rtxn: &RoTxn) -> heed::Result<Vec<RankingRule>> {
match self.main.get::<_, Str, SerdeJson<Vec<RankingRule>>>(rtxn, main_key::CRITERIA_KEY)? {
Some(criteria) => Ok(criteria),
None => Ok(default_criteria()),
}

View File

@@ -9,7 +9,7 @@ pub static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
pub mod documents;
mod asc_desc;
mod criterion;
mod boost;
pub mod distance;
mod error;
mod external_documents_ids;
@@ -18,6 +18,7 @@ mod fields_ids_map;
pub mod heed_codec;
pub mod index;
pub mod proximity;
mod ranking_rule;
mod readable_slices;
pub mod score_details;
mod search;
@@ -44,7 +45,6 @@ use serde_json::Value;
pub use {charabia as tokenizer, heed};
pub use self::asc_desc::{AscDesc, AscDescError, Member, SortError};
pub use self::criterion::{default_criteria, Criterion, CriterionError};
pub use self::error::{
Error, FieldIdMapMissingEntry, InternalError, SerializationError, UserError,
};
@@ -57,6 +57,7 @@ pub use self::heed_codec::{
UncheckedU8StrStrCodec,
};
pub use self::index::Index;
pub use self::ranking_rule::{default_criteria, RankingRule, RankingRuleError};
pub use self::search::{
FacetDistribution, FacetValueHit, Filter, FormatOptions, MatchBounds, MatcherBuilder,
MatchingWords, OrderBy, Search, SearchForFacetValues, SearchResult, TermsMatchingStrategy,

View File

@@ -4,10 +4,11 @@ use std::str::FromStr;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::{AscDesc, Member};
use crate::boost::{Boost, BoostError};
use crate::{AscDesc, AscDescError, Member};
#[derive(Error, Debug)]
pub enum CriterionError {
pub enum RankingRuleError {
#[error("`{name}` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.")]
InvalidName { name: String },
#[error("`{name}` is a reserved keyword and thus can't be used as a ranking rule")]
@@ -25,10 +26,12 @@ pub enum CriterionError {
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub enum Criterion {
pub enum RankingRule {
/// Sorted by decreasing number of matched query terms.
/// Query words at the front of an attribute is considered better than if it was at the back.
Words,
/// Sorted by documents matching the given filter and then documents not matching it.
Boost(String),
/// Sorted by increasing number of typos.
Typo,
/// Sorted by increasing distance between matched query terms.
@@ -47,62 +50,76 @@ pub enum Criterion {
Desc(String),
}
impl Criterion {
impl RankingRule {
/// Returns the field name parameter of this criterion.
pub fn field_name(&self) -> Option<&str> {
match self {
Criterion::Asc(name) | Criterion::Desc(name) => Some(name),
RankingRule::Asc(name) | RankingRule::Desc(name) => Some(name),
_otherwise => None,
}
}
}
impl FromStr for Criterion {
type Err = CriterionError;
impl FromStr for RankingRule {
type Err = RankingRuleError;
fn from_str(text: &str) -> Result<Criterion, Self::Err> {
fn from_str(text: &str) -> Result<RankingRule, Self::Err> {
match text {
"words" => Ok(Criterion::Words),
"typo" => Ok(Criterion::Typo),
"proximity" => Ok(Criterion::Proximity),
"attribute" => Ok(Criterion::Attribute),
"sort" => Ok(Criterion::Sort),
"exactness" => Ok(Criterion::Exactness),
text => match AscDesc::from_str(text)? {
AscDesc::Asc(Member::Field(field)) => Ok(Criterion::Asc(field)),
AscDesc::Desc(Member::Field(field)) => Ok(Criterion::Desc(field)),
AscDesc::Asc(Member::Geo(_)) | AscDesc::Desc(Member::Geo(_)) => {
Err(CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() })?
}
"words" => Ok(RankingRule::Words),
"typo" => Ok(RankingRule::Typo),
"proximity" => Ok(RankingRule::Proximity),
"attribute" => Ok(RankingRule::Attribute),
"sort" => Ok(RankingRule::Sort),
"exactness" => Ok(RankingRule::Exactness),
text => match (AscDesc::from_str(text), Boost::from_str(text)) {
(Ok(asc_desc), _) => match asc_desc {
AscDesc::Asc(Member::Field(field)) => Ok(RankingRule::Asc(field)),
AscDesc::Desc(Member::Field(field)) => Ok(RankingRule::Desc(field)),
AscDesc::Asc(Member::Geo(_)) | AscDesc::Desc(Member::Geo(_)) => {
Err(RankingRuleError::ReservedNameForSort {
name: "_geoPoint".to_string(),
})?
}
},
(_, Ok(Boost(filter))) => Ok(RankingRule::Boost(filter)),
(
Err(AscDescError::InvalidSyntax { name: asc_desc_name }),
Err(BoostError::InvalidSyntax { name: boost_name }),
) => Err(RankingRuleError::InvalidName {
// TODO improve the error message quality
name: format!("{asc_desc_name} {boost_name}"),
}),
(Err(asc_desc_error), _) => Err(asc_desc_error.into()),
},
}
}
}
pub fn default_criteria() -> Vec<Criterion> {
pub fn default_criteria() -> Vec<RankingRule> {
vec![
Criterion::Words,
Criterion::Typo,
Criterion::Proximity,
Criterion::Attribute,
Criterion::Sort,
Criterion::Exactness,
RankingRule::Words,
RankingRule::Typo,
RankingRule::Proximity,
RankingRule::Attribute,
RankingRule::Sort,
RankingRule::Exactness,
]
}
impl fmt::Display for Criterion {
impl fmt::Display for RankingRule {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use Criterion::*;
use RankingRule::*;
match self {
Words => f.write_str("words"),
Boost(filter) => write!(f, "boost:{filter}"),
Typo => f.write_str("typo"),
Proximity => f.write_str("proximity"),
Attribute => f.write_str("attribute"),
Sort => f.write_str("sort"),
Exactness => f.write_str("exactness"),
Asc(attr) => write!(f, "{}:asc", attr),
Desc(attr) => write!(f, "{}:desc", attr),
Asc(attr) => write!(f, "{attr}:asc"),
Desc(attr) => write!(f, "{attr}:desc"),
}
}
}
@@ -110,29 +127,29 @@ impl fmt::Display for Criterion {
#[cfg(test)]
mod tests {
use big_s::S;
use CriterionError::*;
use RankingRuleError::*;
use super::*;
#[test]
fn parse_criterion() {
let valid_criteria = [
("words", Criterion::Words),
("typo", Criterion::Typo),
("proximity", Criterion::Proximity),
("attribute", Criterion::Attribute),
("sort", Criterion::Sort),
("exactness", Criterion::Exactness),
("price:asc", Criterion::Asc(S("price"))),
("price:desc", Criterion::Desc(S("price"))),
("price:asc:desc", Criterion::Desc(S("price:asc"))),
("truc:machin:desc", Criterion::Desc(S("truc:machin"))),
("hello-world!:desc", Criterion::Desc(S("hello-world!"))),
("it's spacy over there:asc", Criterion::Asc(S("it's spacy over there"))),
("words", RankingRule::Words),
("typo", RankingRule::Typo),
("proximity", RankingRule::Proximity),
("attribute", RankingRule::Attribute),
("sort", RankingRule::Sort),
("exactness", RankingRule::Exactness),
("price:asc", RankingRule::Asc(S("price"))),
("price:desc", RankingRule::Desc(S("price"))),
("price:asc:desc", RankingRule::Desc(S("price:asc"))),
("truc:machin:desc", RankingRule::Desc(S("truc:machin"))),
("hello-world!:desc", RankingRule::Desc(S("hello-world!"))),
("it's spacy over there:asc", RankingRule::Asc(S("it's spacy over there"))),
];
for (input, expected) in valid_criteria {
let res = input.parse::<Criterion>();
let res = input.parse::<RankingRule>();
assert!(
res.is_ok(),
"Failed to parse `{}`, was expecting `{:?}` but instead got `{:?}`",
@@ -167,7 +184,7 @@ mod tests {
];
for (input, expected) in invalid_criteria {
let res = input.parse::<Criterion>();
let res = input.parse::<RankingRule>();
assert!(
res.is_err(),
"Should no be able to parse `{}`, was expecting an error but instead got: `{:?}`",

View File

@@ -5,6 +5,7 @@ use crate::distance_between_two_points;
#[derive(Debug, Clone, PartialEq)]
pub enum ScoreDetails {
Words(Words),
Boost(Boost),
Typo(Typo),
Proximity(Rank),
Fid(Rank),
@@ -23,6 +24,7 @@ impl ScoreDetails {
pub fn rank(&self) -> Option<Rank> {
match self {
ScoreDetails::Words(details) => Some(details.rank()),
ScoreDetails::Boost(_) => None,
ScoreDetails::Typo(details) => Some(details.rank()),
ScoreDetails::Proximity(details) => Some(*details),
ScoreDetails::Fid(details) => Some(*details),
@@ -60,6 +62,14 @@ impl ScoreDetails {
details_map.insert("words".into(), words_details);
order += 1;
}
ScoreDetails::Boost(Boost { filter, matching }) => {
let sort = format!("boost:{}", filter);
let sort_details = serde_json::json!({
"value": matching,
});
details_map.insert(sort, sort_details);
order += 1;
}
ScoreDetails::Typo(typo) => {
let typo_details = serde_json::json!({
"order": order,
@@ -221,6 +231,12 @@ impl Words {
}
}
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Boost {
pub filter: String,
pub matching: bool,
}
/// Structure that is super similar to [`Words`], but whose semantics is a bit distinct.
///
/// In exactness, the number of matching words can actually be 0 with a non-zero score,
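
For reference, the new `ScoreDetails::Boost` arm above contributes a `boost:<filter>` key with a boolean `value` to the score-details map. A small sketch of the resulting JSON fragment; the filter is illustrative and the serde_json crate is assumed:

```rust
fn main() {
    let filter = "is_promoted = true"; // illustrative filter expression
    let matching = true;
    let mut details = serde_json::Map::new();
    details.insert(format!("boost:{filter}"), serde_json::json!({ "value": matching }));
    println!("{}", serde_json::Value::Object(details));
    // prints: {"boost:is_promoted = true":{"value":true}}
}
```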

View File

@@ -0,0 +1,88 @@
use roaring::RoaringBitmap;
use super::logger::SearchLogger;
use super::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait, SearchContext};
use crate::score_details::{self, ScoreDetails};
use crate::{Filter, Result};
pub struct Boost<Query> {
original_expression: String,
original_query: Option<Query>,
matching: Option<RankingRuleOutput<Query>>,
non_matching: Option<RankingRuleOutput<Query>>,
}
impl<Query> Boost<Query> {
pub fn new(expression: String) -> Result<Self> {
Ok(Self {
original_expression: expression,
original_query: None,
matching: None,
non_matching: None,
})
}
}
impl<'ctx, Query: RankingRuleQueryTrait> RankingRule<'ctx, Query> for Boost<Query> {
fn id(&self) -> String {
// TODO improve this
let Self { original_expression, .. } = self;
format!("boost:{original_expression}")
}
fn start_iteration(
&mut self,
ctx: &mut SearchContext<'ctx>,
_logger: &mut dyn SearchLogger<Query>,
parent_candidates: &RoaringBitmap,
parent_query: &Query,
) -> Result<()> {
let universe_matching = match Filter::from_str(&self.original_expression)? {
Some(filter) => filter.evaluate(ctx.txn, ctx.index)?,
None => RoaringBitmap::default(),
};
let matching = parent_candidates & universe_matching;
let non_matching = parent_candidates - &matching;
self.original_query = Some(parent_query.clone());
self.matching = Some(RankingRuleOutput {
query: parent_query.clone(),
candidates: matching,
score: ScoreDetails::Boost(score_details::Boost {
filter: self.original_expression.clone(),
matching: true,
}),
});
self.non_matching = Some(RankingRuleOutput {
query: parent_query.clone(),
candidates: non_matching,
score: ScoreDetails::Boost(score_details::Boost {
filter: self.original_expression.clone(),
matching: false,
}),
});
Ok(())
}
fn next_bucket(
&mut self,
_ctx: &mut SearchContext<'ctx>,
_logger: &mut dyn SearchLogger<Query>,
_universe: &RoaringBitmap,
) -> Result<Option<RankingRuleOutput<Query>>> {
Ok(self.matching.take().or_else(|| self.non_matching.take()))
}
fn end_iteration(
&mut self,
_ctx: &mut SearchContext<'ctx>,
_logger: &mut dyn SearchLogger<Query>,
) {
self.original_query = None;
self.matching = None;
self.non_matching = None;
}
}
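
The rule above splits the universe into at most two buckets per iteration: documents matching the boost filter first, then the rest. The take-or-else pattern used by `next_bucket` can be illustrated in isolation:

```rust
// Isolated illustration of the bucket order: the matching bucket is drained first,
// then the non-matching bucket, after which iteration for this rule ends.
fn main() {
    let mut matching = Some("documents matching the boost filter");
    let mut non_matching = Some("all other documents");
    let mut next_bucket = || matching.take().or_else(|| non_matching.take());
    assert_eq!(next_bucket(), Some("documents matching the boost filter"));
    assert_eq!(next_bucket(), Some("all other documents"));
    assert_eq!(next_bucket(), None);
}
```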

View File

@@ -14,6 +14,7 @@ mod ranking_rules;
mod resolve_query_graph;
mod small_bitmap;
mod boost;
mod exact_attribute;
mod sort;
@@ -22,6 +23,7 @@ mod tests;
use std::collections::HashSet;
use boost::Boost;
use bucket_sort::{bucket_sort, BucketSortOutput};
use charabia::TokenizerBuilder;
use db_cache::DatabaseCache;
@@ -203,12 +205,13 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
for rr in settings_ranking_rules {
match rr {
// These rules need a query to have an effect; ignore them in placeholder search
crate::Criterion::Words
| crate::Criterion::Typo
| crate::Criterion::Attribute
| crate::Criterion::Proximity
| crate::Criterion::Exactness => continue,
crate::Criterion::Sort => {
crate::RankingRule::Words
| crate::RankingRule::Typo
| crate::RankingRule::Attribute
| crate::RankingRule::Proximity
| crate::RankingRule::Exactness => continue,
crate::RankingRule::Boost(filter) => ranking_rules.push(Box::new(Boost::new(filter)?)),
crate::RankingRule::Sort => {
if sort {
continue;
}
@@ -222,14 +225,14 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
)?;
sort = true;
}
crate::Criterion::Asc(field_name) => {
crate::RankingRule::Asc(field_name) => {
if sorted_fields.contains(&field_name) {
continue;
}
sorted_fields.insert(field_name.clone());
ranking_rules.push(Box::new(Sort::new(ctx.index, ctx.txn, field_name, true)?));
}
crate::Criterion::Desc(field_name) => {
crate::RankingRule::Desc(field_name) => {
if sorted_fields.contains(&field_name) {
continue;
}
@@ -268,10 +271,10 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
for rr in settings_ranking_rules {
// Add Words before any of: typo, proximity, attribute
match rr {
crate::Criterion::Typo
| crate::Criterion::Attribute
| crate::Criterion::Proximity
| crate::Criterion::Exactness => {
crate::RankingRule::Typo
| crate::RankingRule::Attribute
| crate::RankingRule::Proximity
| crate::RankingRule::Exactness => {
if !words {
ranking_rules.push(Box::new(Words::new(terms_matching_strategy)));
words = true;
@@ -280,28 +283,31 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
_ => {}
}
match rr {
crate::Criterion::Words => {
crate::RankingRule::Words => {
if words {
continue;
}
ranking_rules.push(Box::new(Words::new(terms_matching_strategy)));
words = true;
}
crate::Criterion::Typo => {
crate::RankingRule::Boost(filter) => {
ranking_rules.push(Box::new(Boost::new(filter)?));
}
crate::RankingRule::Typo => {
if typo {
continue;
}
typo = true;
ranking_rules.push(Box::new(Typo::new(None)));
}
crate::Criterion::Proximity => {
crate::RankingRule::Proximity => {
if proximity {
continue;
}
proximity = true;
ranking_rules.push(Box::new(Proximity::new(None)));
}
crate::Criterion::Attribute => {
crate::RankingRule::Attribute => {
if attribute {
continue;
}
@@ -309,7 +315,7 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
ranking_rules.push(Box::new(Fid::new(None)));
ranking_rules.push(Box::new(Position::new(None)));
}
crate::Criterion::Sort => {
crate::RankingRule::Sort => {
if sort {
continue;
}
@@ -323,7 +329,7 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
)?;
sort = true;
}
crate::Criterion::Exactness => {
crate::RankingRule::Exactness => {
if exactness {
continue;
}
@@ -331,14 +337,15 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
ranking_rules.push(Box::new(Exactness::new()));
exactness = true;
}
crate::Criterion::Asc(field_name) => {
crate::RankingRule::Asc(field_name) => {
// TODO Question: Why would it be invalid to sort price:asc, typo, price:desc?
if sorted_fields.contains(&field_name) {
continue;
}
sorted_fields.insert(field_name.clone());
ranking_rules.push(Box::new(Sort::new(ctx.index, ctx.txn, field_name, true)?));
}
crate::Criterion::Desc(field_name) => {
crate::RankingRule::Desc(field_name) => {
if sorted_fields.contains(&field_name) {
continue;
}
@@ -580,7 +587,8 @@ fn check_sort_criteria(ctx: &SearchContext, sort_criteria: Option<&Vec<AscDesc>>
// We check that the sort ranking rule exists and throw an
// error if we try to use it and that it doesn't.
let sort_ranking_rule_missing = !ctx.index.criteria(ctx.txn)?.contains(&crate::Criterion::Sort);
let sort_ranking_rule_missing =
!ctx.index.criteria(ctx.txn)?.contains(&crate::RankingRule::Sort);
if sort_ranking_rule_missing {
return Err(UserError::SortRankingRuleMissing.into());
}

View File

@@ -1,5 +1,5 @@
use crate::index::tests::TempIndex;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -12,7 +12,7 @@ fn create_index() -> TempIndex {
"description".to_owned(),
"plot".to_owned(),
]);
s.set_criteria(vec![Criterion::Attribute]);
s.set_ranking_rules(vec![RankingRule::Attribute]);
})
.unwrap();

View File

@@ -1,5 +1,5 @@
use crate::index::tests::TempIndex;
use crate::{db_snap, Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{db_snap, RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -12,7 +12,7 @@ fn create_index() -> TempIndex {
"text2".to_owned(),
"other".to_owned(),
]);
s.set_criteria(vec![Criterion::Attribute]);
s.set_ranking_rules(vec![RankingRule::Attribute]);
})
.unwrap();

View File

@@ -19,7 +19,7 @@ use maplit::hashset;
use super::collect_field_values;
use crate::index::tests::TempIndex;
use crate::{AscDesc, Criterion, Index, Member, Search, SearchResult, TermsMatchingStrategy};
use crate::{AscDesc, Index, Member, RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -30,7 +30,7 @@ fn create_index() -> TempIndex {
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_sortable_fields(hashset! { S("rank1"), S("letter") });
s.set_distinct_field("letter".to_owned());
s.set_criteria(vec![Criterion::Words]);
s.set_ranking_rules(vec![RankingRule::Words]);
})
.unwrap();
@@ -252,7 +252,7 @@ fn test_distinct_placeholder_sort() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Sort]);
s.set_ranking_rules(vec![RankingRule::Sort]);
})
.unwrap();
@@ -387,7 +387,7 @@ fn test_distinct_words() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words]);
s.set_ranking_rules(vec![RankingRule::Words]);
})
.unwrap();
@@ -440,7 +440,11 @@ fn test_distinct_sort_words() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Sort, Criterion::Words, Criterion::Desc(S("rank1"))]);
s.set_ranking_rules(vec![
RankingRule::Sort,
RankingRule::Words,
RankingRule::Desc(S("rank1")),
]);
})
.unwrap();
@@ -513,7 +517,7 @@ fn test_distinct_all_candidates() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Sort]);
s.set_ranking_rules(vec![RankingRule::Sort]);
})
.unwrap();
@@ -536,7 +540,7 @@ fn test_distinct_typo() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Typo]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Typo]);
})
.unwrap();

View File

@@ -21,7 +21,7 @@ Then these rules will only work with
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index_simple_ordered() -> TempIndex {
let index = TempIndex::new();
@@ -30,7 +30,7 @@ fn create_index_simple_ordered() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Exactness]);
})
.unwrap();
@@ -89,7 +89,7 @@ fn create_index_simple_reversed() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Exactness]);
})
.unwrap();
@@ -147,7 +147,7 @@ fn create_index_simple_random() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Exactness]);
})
.unwrap();
@@ -201,7 +201,7 @@ fn create_index_attribute_starts_with() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Exactness]);
})
.unwrap();
@@ -251,7 +251,7 @@ fn create_index_simple_ordered_with_typos() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Exactness]);
})
.unwrap();
@@ -350,7 +350,11 @@ fn create_index_with_varying_proximities() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness, Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![
RankingRule::Exactness,
RankingRule::Words,
RankingRule::Proximity,
]);
})
.unwrap();
@@ -404,7 +408,7 @@ fn create_index_with_typo_and_prefix() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Exactness]);
})
.unwrap();
@@ -442,7 +446,11 @@ fn create_index_all_equal_except_proximity_between_ignored_terms() -> TempIndex
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Exactness, Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![
RankingRule::Exactness,
RankingRule::Words,
RankingRule::Proximity,
]);
})
.unwrap();
@@ -698,7 +706,7 @@ fn test_exactness_after_words() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Exactness]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Exactness]);
})
.unwrap();
@@ -747,7 +755,7 @@ fn test_words_after_exactness() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Exactness, Criterion::Words]);
s.set_ranking_rules(vec![RankingRule::Exactness, RankingRule::Words]);
})
.unwrap();
@@ -796,7 +804,11 @@ fn test_proximity_after_exactness() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Exactness, Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![
RankingRule::Exactness,
RankingRule::Words,
RankingRule::Proximity,
]);
})
.unwrap();
@@ -834,7 +846,11 @@ fn test_proximity_after_exactness() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Exactness, Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![
RankingRule::Exactness,
RankingRule::Words,
RankingRule::Proximity,
]);
})
.unwrap();
@@ -868,7 +884,11 @@ fn test_exactness_followed_by_typo_prefer_no_typo_prefix() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Exactness, Criterion::Words, Criterion::Typo]);
s.set_ranking_rules(vec![
RankingRule::Exactness,
RankingRule::Words,
RankingRule::Typo,
]);
})
.unwrap();
@@ -904,7 +924,11 @@ fn test_typo_followed_by_exactness() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Typo, Criterion::Exactness]);
s.set_ranking_rules(vec![
RankingRule::Words,
RankingRule::Typo,
RankingRule::Exactness,
]);
})
.unwrap();

View File

@@ -9,7 +9,7 @@ use maplit::hashset;
use crate::index::tests::TempIndex;
use crate::score_details::ScoreDetails;
use crate::search::new::tests::collect_field_values;
use crate::{AscDesc, Criterion, GeoSortStrategy, Member, Search, SearchResult};
use crate::{AscDesc, GeoSortStrategy, Member, RankingRule, Search, SearchResult};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -18,7 +18,7 @@ fn create_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_sortable_fields(hashset! { S("_geo") });
s.set_criteria(vec![Criterion::Words, Criterion::Sort]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Sort]);
})
.unwrap();
index

View File

@@ -6,10 +6,10 @@ use maplit::{btreemap, hashset};
use crate::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use crate::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
use crate::{db_snap, Criterion, Index, Object};
use crate::{db_snap, Index, Object, RankingRule};
pub const CONTENT: &str = include_str!("../../../../tests/assets/test_set.ndjson");
pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
pub fn setup_search_index_with_criteria(criteria: &[RankingRule]) -> Index {
let path = tempfile::tempdir().unwrap();
let mut options = EnvOpenOptions::new();
options.map_size(10 * 1024 * 1024); // 10 MB
@@ -20,7 +20,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
let mut builder = Settings::new(&mut wtxn, &index, &config);
builder.set_criteria(criteria.to_vec());
builder.set_ranking_rules(criteria.to_vec());
builder.set_filterable_fields(hashset! {
S("tag"),
S("asc_desc_rank"),
@@ -70,6 +70,6 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
#[test]
fn snapshot_integration_dataset() {
let index = setup_search_index_with_criteria(&[Criterion::Attribute]);
let index = setup_search_index_with_criteria(&[RankingRule::Attribute]);
db_snap!(index, word_position_docids, @"3c9347a767bceef3beb31465f1e5f3ae");
}

View File

@@ -19,7 +19,7 @@ This module tests the following properties:
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -28,7 +28,7 @@ fn create_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words]);
s.set_ranking_rules(vec![RankingRule::Words]);
})
.unwrap();

View File

@@ -19,7 +19,7 @@ use std::collections::BTreeMap;
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_simple_index() -> TempIndex {
let index = TempIndex::new();
@@ -28,7 +28,7 @@ fn create_simple_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Proximity]);
})
.unwrap();
@@ -94,7 +94,7 @@ fn create_edge_cases_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Proximity]);
})
.unwrap();

View File

@@ -8,7 +8,7 @@ implemented.
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -17,7 +17,11 @@ fn create_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words, Criterion::Proximity, Criterion::Typo]);
s.set_ranking_rules(vec![
RankingRule::Words,
RankingRule::Proximity,
RankingRule::Typo,
]);
})
.unwrap();

View File

@@ -17,7 +17,7 @@ use maplit::hashset;
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{
score_details, AscDesc, Criterion, Member, Search, SearchResult, TermsMatchingStrategy,
score_details, AscDesc, Member, RankingRule, Search, SearchResult, TermsMatchingStrategy,
};
fn create_index() -> TempIndex {
@@ -28,7 +28,7 @@ fn create_index() -> TempIndex {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_sortable_fields(hashset! { S("rank"), S("vague"), S("letter") });
s.set_criteria(vec![Criterion::Sort]);
s.set_ranking_rules(vec![RankingRule::Sort]);
})
.unwrap();
@@ -331,7 +331,7 @@ fn test_redacted() {
.update_settings(|s| {
s.set_displayed_fields(vec!["text".to_owned(), "vague".to_owned()]);
s.set_sortable_fields(hashset! { S("rank"), S("vague"), S("letter") });
s.set_criteria(vec![Criterion::Sort]);
s.set_ranking_rules(vec![RankingRule::Sort]);
})
.unwrap();

View File

@@ -22,7 +22,7 @@ use std::collections::BTreeMap;
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -31,7 +31,7 @@ fn create_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words]);
s.set_ranking_rules(vec![RankingRule::Words]);
})
.unwrap();
@@ -457,7 +457,7 @@ fn test_typo_ranking_rule_not_preceded_by_words_ranking_rule() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Typo]);
s.set_ranking_rules(vec![RankingRule::Typo]);
})
.unwrap();
@@ -495,7 +495,7 @@ fn test_typo_ranking_rule_not_preceded_by_words_ranking_rule() {
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Typo]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Typo]);
})
.unwrap();
@@ -540,7 +540,7 @@ fn test_typo_bucketing() {
drop(txn);
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Typo]);
s.set_ranking_rules(vec![RankingRule::Typo]);
})
.unwrap();
let txn = index.read_txn().unwrap();
@@ -589,7 +589,7 @@ fn test_typo_synonyms() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Typo]);
s.set_ranking_rules(vec![RankingRule::Typo]);
let mut synonyms = BTreeMap::new();
synonyms.insert("lackadaisical".to_owned(), vec!["lazy".to_owned()]);

View File

@@ -17,7 +17,7 @@ because the typo ranking rule before it only used the derivation `beautiful`.
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -26,7 +26,11 @@ fn create_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words, Criterion::Typo, Criterion::Proximity]);
s.set_ranking_rules(vec![
RankingRule::Words,
RankingRule::Typo,
RankingRule::Proximity,
]);
})
.unwrap();

View File

@@ -14,7 +14,7 @@ account by the proximity ranking rule.
use crate::index::tests::TempIndex;
use crate::search::new::tests::collect_field_values;
use crate::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use crate::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
fn create_index() -> TempIndex {
let index = TempIndex::new();
@@ -23,7 +23,7 @@ fn create_index() -> TempIndex {
.update_settings(|s| {
s.set_primary_key("id".to_owned());
s.set_searchable_fields(vec!["text".to_owned()]);
s.set_criteria(vec![Criterion::Words]);
s.set_ranking_rules(vec![RankingRule::Words]);
})
.unwrap();
@@ -265,7 +265,7 @@ fn test_words_proximity_tms_last_simple() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Proximity]);
})
.unwrap();
@@ -346,7 +346,7 @@ fn test_words_proximity_tms_last_phrase() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Proximity]);
})
.unwrap();
@@ -416,7 +416,7 @@ fn test_words_tms_all() {
let index = create_index();
index
.update_settings(|s| {
s.set_criteria(vec![Criterion::Words, Criterion::Proximity]);
s.set_ranking_rules(vec![RankingRule::Words, RankingRule::Proximity]);
})
.unwrap();

View File

@@ -9,9 +9,9 @@ use time::OffsetDateTime;
use super::index_documents::{IndexDocumentsConfig, Transform};
use super::IndexerConfig;
use crate::criterion::Criterion;
use crate::error::UserError;
use crate::index::{DEFAULT_MIN_WORD_LEN_ONE_TYPO, DEFAULT_MIN_WORD_LEN_TWO_TYPOS};
use crate::ranking_rule::RankingRule;
use crate::update::index_documents::IndexDocumentsMethod;
use crate::update::{IndexDocuments, UpdateIndexingStep};
use crate::{FieldsIdsMap, Index, OrderBy, Result};
@@ -110,7 +110,7 @@ pub struct Settings<'a, 't, 'u, 'i> {
displayed_fields: Setting<Vec<String>>,
filterable_fields: Setting<HashSet<String>>,
sortable_fields: Setting<HashSet<String>>,
criteria: Setting<Vec<Criterion>>,
ranking_rules: Setting<Vec<RankingRule>>,
stop_words: Setting<BTreeSet<String>>,
non_separator_tokens: Setting<BTreeSet<String>>,
separator_tokens: Setting<BTreeSet<String>>,
@@ -142,7 +142,7 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
displayed_fields: Setting::NotSet,
filterable_fields: Setting::NotSet,
sortable_fields: Setting::NotSet,
criteria: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
separator_tokens: Setting::NotSet,
@@ -194,12 +194,12 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
self.sortable_fields = Setting::Reset;
}
pub fn reset_criteria(&mut self) {
self.criteria = Setting::Reset;
pub fn reset_ranking_rules(&mut self) {
self.ranking_rules = Setting::Reset;
}
pub fn set_criteria(&mut self, criteria: Vec<Criterion>) {
self.criteria = Setting::Set(criteria);
pub fn set_ranking_rules(&mut self, ranking_rules: Vec<RankingRule>) {
self.ranking_rules = Setting::Set(ranking_rules);
}
pub fn reset_stop_words(&mut self) {
@@ -696,7 +696,7 @@ impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
}
fn update_criteria(&mut self) -> Result<()> {
match &self.criteria {
match &self.ranking_rules {
Setting::Set(criteria) => {
self.index.put_criteria(self.wtxn, criteria)?;
}
@@ -924,7 +924,7 @@ mod tests {
use crate::error::Error;
use crate::index::tests::TempIndex;
use crate::update::{ClearDocuments, DeleteDocuments};
use crate::{Criterion, Filter, SearchResult};
use crate::{Filter, RankingRule, SearchResult};
#[test]
fn set_and_reset_searchable_fields() {
@@ -1167,7 +1167,7 @@ mod tests {
index
.update_settings(|settings| {
settings.set_displayed_fields(vec![S("name")]);
settings.set_criteria(vec![Criterion::Asc("age".to_owned())]);
settings.set_ranking_rules(vec![RankingRule::Asc("age".to_owned())]);
})
.unwrap();
@@ -1473,7 +1473,7 @@ mod tests {
.update_settings(|settings| {
settings.set_displayed_fields(vec!["hello".to_string()]);
settings.set_filterable_fields(hashset! { S("age"), S("toto") });
settings.set_criteria(vec![Criterion::Asc(S("toto"))]);
settings.set_ranking_rules(vec![RankingRule::Asc(S("toto"))]);
})
.unwrap();
@@ -1482,7 +1482,7 @@ mod tests {
assert_eq!(&["hello"][..], index.displayed_fields(&rtxn).unwrap().unwrap());
// since no documents have been pushed the primary key is still unset
assert!(index.primary_key(&rtxn).unwrap().is_none());
assert_eq!(vec![Criterion::Asc("toto".to_string())], index.criteria(&rtxn).unwrap());
assert_eq!(vec![RankingRule::Asc("toto".to_string())], index.criteria(&rtxn).unwrap());
drop(rtxn);
// We set toto and age as searchable to force reordering of the fields
@@ -1495,7 +1495,7 @@ mod tests {
let rtxn = index.read_txn().unwrap();
assert_eq!(&["hello"][..], index.displayed_fields(&rtxn).unwrap().unwrap());
assert!(index.primary_key(&rtxn).unwrap().is_none());
assert_eq!(vec![Criterion::Asc("toto".to_string())], index.criteria(&rtxn).unwrap());
assert_eq!(vec![RankingRule::Asc("toto".to_string())], index.criteria(&rtxn).unwrap());
}
#[test]
@@ -1507,7 +1507,7 @@ mod tests {
.update_settings(|settings| {
settings.set_displayed_fields(vec!["hello".to_string()]);
// Only Asc(toto) is set: the facet database exists, but filtering on toto is not allowed.
settings.set_criteria(vec![Criterion::Asc(S("toto"))]);
settings.set_ranking_rules(vec![RankingRule::Asc(S("toto"))]);
})
.unwrap();
@@ -1715,7 +1715,7 @@ mod tests {
displayed_fields,
filterable_fields,
sortable_fields,
criteria,
ranking_rules: criteria,
stop_words,
non_separator_tokens,
separator_tokens,

View File

@@ -2,8 +2,8 @@ use std::collections::HashSet;
use big_s::S;
use milli::update::Settings;
use milli::{Criterion, Search, SearchResult, TermsMatchingStrategy};
use Criterion::*;
use milli::{RankingRule, Search, SearchResult, TermsMatchingStrategy};
use RankingRule::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};

View File

@@ -1,6 +1,6 @@
use either::{Either, Left, Right};
use milli::{Criterion, Filter, Search, SearchResult, TermsMatchingStrategy};
use Criterion::*;
use milli::{Filter, RankingRule, Search, SearchResult, TermsMatchingStrategy};
use RankingRule::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};

View File

@@ -8,7 +8,7 @@ use heed::EnvOpenOptions;
use maplit::{btreemap, hashset};
use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
use milli::{AscDesc, Criterion, DocumentId, Index, Member, Object, TermsMatchingStrategy};
use milli::{AscDesc, DocumentId, Index, Member, Object, RankingRule, TermsMatchingStrategy};
use serde::{Deserialize, Deserializer};
use slice_group_by::GroupBy;
@@ -27,7 +27,7 @@ pub const EXTERNAL_DOCUMENTS_IDS: &[&str; 17] =
pub const CONTENT: &str = include_str!("../assets/test_set.ndjson");
pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
pub fn setup_search_index_with_criteria(criteria: &[RankingRule]) -> Index {
let path = tempfile::tempdir().unwrap();
let mut options = EnvOpenOptions::new();
options.map_size(10 * 1024 * 1024); // 10 MB
@@ -38,7 +38,7 @@ pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
let mut builder = Settings::new(&mut wtxn, &index, &config);
builder.set_criteria(criteria.to_vec());
builder.set_ranking_rules(criteria.to_vec());
builder.set_filterable_fields(hashset! {
S("tag"),
S("asc_desc_rank"),
@@ -95,7 +95,7 @@ pub fn internal_to_external_ids(index: &Index, internal_ids: &[DocumentId]) -> V
}
pub fn expected_order(
criteria: &[Criterion],
criteria: &[RankingRule],
optional_words: TermsMatchingStrategy,
sort_by: &[AscDesc],
) -> Vec<TestDocument> {
@@ -107,47 +107,56 @@ pub fn expected_order(
let mut new_groups = Vec::new();
for group in groups.iter_mut() {
match criterion {
Criterion::Attribute => {
RankingRule::Attribute => {
group.sort_by_key(|d| d.attribute_rank);
new_groups
.extend(group.linear_group_by_key(|d| d.attribute_rank).map(Vec::from));
}
Criterion::Exactness => {
RankingRule::Exactness => {
group.sort_by_key(|d| d.exact_rank);
new_groups.extend(group.linear_group_by_key(|d| d.exact_rank).map(Vec::from));
}
Criterion::Proximity => {
RankingRule::Proximity => {
group.sort_by_key(|d| d.proximity_rank);
new_groups
.extend(group.linear_group_by_key(|d| d.proximity_rank).map(Vec::from));
}
Criterion::Sort if sort_by == [AscDesc::Asc(Member::Field(S("tag")))] => {
RankingRule::Sort if sort_by == [AscDesc::Asc(Member::Field(S("tag")))] => {
group.sort_by_key(|d| d.sort_by_rank);
new_groups.extend(group.linear_group_by_key(|d| d.sort_by_rank).map(Vec::from));
}
Criterion::Sort if sort_by == [AscDesc::Desc(Member::Field(S("tag")))] => {
RankingRule::Sort if sort_by == [AscDesc::Desc(Member::Field(S("tag")))] => {
group.sort_by_key(|d| Reverse(d.sort_by_rank));
new_groups.extend(group.linear_group_by_key(|d| d.sort_by_rank).map(Vec::from));
}
Criterion::Typo => {
RankingRule::Typo => {
group.sort_by_key(|d| d.typo_rank);
new_groups.extend(group.linear_group_by_key(|d| d.typo_rank).map(Vec::from));
}
Criterion::Words => {
RankingRule::Words => {
group.sort_by_key(|d| d.word_rank);
new_groups.extend(group.linear_group_by_key(|d| d.word_rank).map(Vec::from));
}
Criterion::Asc(field_name) if field_name == "asc_desc_rank" => {
RankingRule::Asc(field_name) if field_name == "asc_desc_rank" => {
group.sort_by_key(|d| d.asc_desc_rank);
new_groups
.extend(group.linear_group_by_key(|d| d.asc_desc_rank).map(Vec::from));
}
Criterion::Desc(field_name) if field_name == "asc_desc_rank" => {
RankingRule::Desc(field_name) if field_name == "asc_desc_rank" => {
group.sort_by_key(|d| Reverse(d.asc_desc_rank));
new_groups
.extend(group.linear_group_by_key(|d| d.asc_desc_rank).map(Vec::from));
}
Criterion::Asc(_) | Criterion::Desc(_) | Criterion::Sort => {
RankingRule::Boost(filter) => {
// move the matching documents first, then the ones that don't match
group.sort_by_key(|d| if execute_filter(filter, d).is_some() { 0 } else { 1 });
new_groups.extend(
group
.linear_group_by_key(|d| execute_filter(filter, d).is_some())
.map(Vec::from),
);
}
RankingRule::Asc(_) | RankingRule::Desc(_) | RankingRule::Sort => {
new_groups.push(group.clone())
}
}
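
The only behavioral addition in this hunk is the new `RankingRule::Boost(filter)` arm of `expected_order`: documents matching the boost filter are sorted ahead of those that do not, and the two resulting partitions are then handed to the remaining rules. As a hedged illustration only (the exact payload of `Boost` and the filter syntax accepted by `execute_filter` are assumptions, not confirmed by this diff), a test could combine the new rule with the existing helper roughly like this:

```rust
// Sketch, not part of the diff: exercising the new Boost ranking rule through
// the test helpers above. Assumes `RankingRule::Boost` carries the filter
// expression as a String, as suggested by `execute_filter(filter, d)`.
use milli::{RankingRule, Search, TermsMatchingStrategy};

#[test]
fn boost_then_words() {
    // "tag = nice" is an illustrative filter string; any expression understood
    // by the test's `execute_filter` helper would do.
    let criteria = vec![RankingRule::Boost("tag = nice".to_string()), RankingRule::Words];
    let index = setup_search_index_with_criteria(&criteria);

    let rtxn = index.read_txn().unwrap();
    let mut search = Search::new(&rtxn, &index);
    search.query("hello world");
    search.terms_matching_strategy(TermsMatchingStrategy::default());
    let result = search.execute().unwrap();

    // Documents matching the boost filter are expected to rank before the rest.
    assert!(!result.documents_ids.is_empty());
}
```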

View File

@@ -1,7 +1,7 @@
use milli::update::{IndexerConfig, Settings};
use milli::{Criterion, Index, Search, TermsMatchingStrategy};
use milli::{Index, RankingRule, Search, TermsMatchingStrategy};
use crate::search::Criterion::{Attribute, Exactness, Proximity};
use crate::search::RankingRule::{Attribute, Exactness, Proximity};
fn set_stop_words(index: &Index, stop_words: &[&str]) {
let mut wtxn = index.write_txn().unwrap();
@@ -14,7 +14,7 @@ fn set_stop_words(index: &Index, stop_words: &[&str]) {
wtxn.commit().unwrap();
}
fn test_phrase_search_with_stop_words_given_criteria(criteria: &[Criterion]) {
fn test_phrase_search_with_stop_words_given_criteria(criteria: &[RankingRule]) {
let index = super::setup_search_index_with_criteria(criteria);
// Add stop_words

View File

@@ -7,9 +7,9 @@ use itertools::Itertools;
use maplit::hashset;
use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
use milli::{AscDesc, Criterion, Index, Member, Search, SearchResult, TermsMatchingStrategy};
use milli::{AscDesc, Index, Member, RankingRule, Search, SearchResult, TermsMatchingStrategy};
use rand::Rng;
use Criterion::*;
use RankingRule::*;
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};
@@ -88,7 +88,7 @@ test_criterion!(
#[test]
fn criteria_mixup() {
use Criterion::*;
use RankingRule::*;
let index = search::setup_search_index_with_criteria(&[
Words,
Attribute,
@@ -233,7 +233,7 @@ fn criteria_mixup() {
// update ranking rules
let mut wtxn = index.write_txn().unwrap();
let mut builder = Settings::new(&mut wtxn, &index, &config);
builder.set_criteria(criteria.clone());
builder.set_ranking_rules(criteria.clone());
builder.execute(|_| (), || false).unwrap();
wtxn.commit().unwrap();
@@ -324,7 +324,7 @@ fn criteria_ascdesc() {
let mut wtxn = index.write_txn().unwrap();
let mut builder = Settings::new(&mut wtxn, &index, &config);
builder.set_criteria(vec![criterion.clone()]);
builder.set_ranking_rules(vec![criterion.clone()]);
builder.execute(|_| (), || false).unwrap();
wtxn.commit().unwrap();
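
For reference, a minimal sketch of the renamed settings API outside the test harness, assuming only what these hunks show (`Settings::new`, `set_ranking_rules`, the `execute` callback signature) plus a `Default` impl for `IndexerConfig`:

```rust
// Sketch, not part of the diff: updating ranking rules on an existing index
// with the renamed builder methods.
use milli::update::{IndexerConfig, Settings};
use milli::{Index, RankingRule};

fn update_ranking_rules(index: &Index) {
    let config = IndexerConfig::default();
    let mut wtxn = index.write_txn().unwrap();
    let mut builder = Settings::new(&mut wtxn, index, &config);
    // Previously `set_criteria`; resetting is now `reset_ranking_rules`.
    builder.set_ranking_rules(vec![
        RankingRule::Words,
        RankingRule::Typo,
        RankingRule::Proximity,
    ]);
    builder.execute(|_| (), || false).unwrap();
    wtxn.commit().unwrap();
}
```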

View File

@@ -1,5 +1,5 @@
use big_s::S;
use milli::Criterion::{Attribute, Exactness, Proximity, Typo, Words};
use milli::RankingRule::{Attribute, Exactness, Proximity, Typo, Words};
use milli::{AscDesc, Error, Member, Search, TermsMatchingStrategy, UserError};
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};

View File

@@ -2,10 +2,10 @@ use std::collections::BTreeSet;
use heed::EnvOpenOptions;
use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
use milli::{Criterion, Index, Search, TermsMatchingStrategy};
use milli::{Index, RankingRule, Search, TermsMatchingStrategy};
use serde_json::json;
use tempfile::tempdir;
use Criterion::*;
use RankingRule::*;
#[test]
fn test_typo_tolerance_one_typo() {