mirror of
https://github.com/meilisearch/meilisearch.git
synced 2025-07-26 16:21:07 +00:00
212: Introduce integration test on criteria r=Kerollmops a=ManyTheFish - add pre-ranked dataset - test each criterion 1 by 1 - test all criteria in several orders 222: Move the `UpdateStore` into the http-ui crate r=Kerollmops a=Kerollmops We no longer need to have the `UpdateStore` inside the milli crate, as it is the job of the caller to stack the updates and sequentially give them to milli. 223: Update dataset links r=Kerollmops a=curquiza Co-authored-by: many <maxime@meilisearch.com> Co-authored-by: Many <legendre.maxime.isn@gmail.com> Co-authored-by: Kerollmops <clement@meilisearch.com> Co-authored-by: Clémentine Urquizar <clementine@meilisearch.com>
This commit is contained in:
@ -9,7 +9,6 @@ anyhow = "1.0.38"
|
||||
bstr = "0.2.15"
|
||||
byteorder = "1.4.2"
|
||||
chrono = { version = "0.4.19", features = ["serde"] }
|
||||
crossbeam-channel = "0.5.0"
|
||||
csv = "1.1.5"
|
||||
either = "1.6.1"
|
||||
flate2 = "1.0.20"
|
||||
|
@ -4,7 +4,6 @@ mod criterion;
|
||||
mod external_documents_ids;
|
||||
mod fields_ids_map;
|
||||
mod search;
|
||||
mod update_store;
|
||||
pub mod facet;
|
||||
pub mod heed_codec;
|
||||
pub mod index;
|
||||
@ -29,7 +28,6 @@ pub use self::heed_codec::{RoaringBitmapLenCodec, BoRoaringBitmapLenCodec, CboRo
|
||||
pub use self::index::Index;
|
||||
pub use self::search::{Search, FacetDistribution, FilterCondition, SearchResult, MatchingWords};
|
||||
pub use self::tree_level::TreeLevel;
|
||||
pub use self::update_store::UpdateStore;
|
||||
|
||||
pub type FastMap4<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher32>>;
|
||||
pub type FastMap8<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher64>>;
|
||||
|
@ -1,360 +0,0 @@
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crossbeam_channel::Sender;
|
||||
use heed::types::{OwnedType, DecodeIgnore, SerdeJson, ByteSlice};
|
||||
use heed::{EnvOpenOptions, Env, Database};
|
||||
use serde::{Serialize, Deserialize};
|
||||
|
||||
use crate::BEU64;
|
||||
|
||||
/// A persistent store of updates backed by one heed environment and four
/// databases, all keyed by a big-endian `u64` update id.
///
/// An update moves between three states: *pending* (registered, not yet
/// processed), *processed* (handler ran successfully) and *aborted*
/// (cancelled before processing). `Clone` is cheap: heed environments and
/// databases are handles.
#[derive(Clone)]
pub struct UpdateStore<M, N> {
    env: Env,
    // Meta (`M`) of updates waiting to be processed, serialized as JSON.
    pending_meta: Database<OwnedType<BEU64>, SerdeJson<M>>,
    // Raw update content bytes, one entry per pending update.
    pending: Database<OwnedType<BEU64>, ByteSlice>,
    // Meta (`N`) produced by the handler for successfully processed updates.
    processed_meta: Database<OwnedType<BEU64>, SerdeJson<N>>,
    // Original meta (`M`) of updates that were aborted before processing.
    aborted_meta: Database<OwnedType<BEU64>, SerdeJson<M>>,
    // Wakes the background processing thread spawned in `open`.
    notification_sender: Sender<()>,
}
|
||||
|
||||
/// User-provided callback invoked once per pending update.
///
/// Receives the update id, its registered meta `M` and its raw content
/// bytes, and returns the processed meta `N` to be stored.
pub trait UpdateHandler<M, N> {
    fn handle_update(&mut self, update_id: u64, meta: M, content: &[u8]) -> heed::Result<N>;
}
|
||||
|
||||
// Blanket impl so any `FnMut(u64, M, &[u8]) -> heed::Result<N>` closure can
// be passed directly where an `UpdateHandler` is expected (see `open`).
impl<M, N, F> UpdateHandler<M, N> for F
where F: FnMut(u64, M, &[u8]) -> heed::Result<N> + Send + 'static {
    fn handle_update(&mut self, update_id: u64, meta: M, content: &[u8]) -> heed::Result<N> {
        self(update_id, meta, content)
    }
}
|
||||
|
||||
impl<M: 'static, N: 'static> UpdateStore<M, N> {
    /// Opens (or creates) the update store at `path` and spawns a background
    /// thread that sequentially processes pending updates with
    /// `update_handler`.
    ///
    /// The store is returned in an `Arc` because it is shared with the
    /// processing thread.
    pub fn open<P, U>(
        mut options: EnvOpenOptions,
        path: P,
        mut update_handler: U,
    ) -> heed::Result<Arc<UpdateStore<M, N>>>
    where
        P: AsRef<Path>,
        U: UpdateHandler<M, N> + Send + 'static,
        M: for<'a> Deserialize<'a>,
        N: Serialize,
    {
        // Four named databases are created below.
        options.max_dbs(4);
        let env = options.open(path)?;
        let pending_meta = env.create_database(Some("pending-meta"))?;
        let pending = env.create_database(Some("pending"))?;
        let processed_meta = env.create_database(Some("processed-meta"))?;
        let aborted_meta = env.create_database(Some("aborted-meta"))?;

        // A capacity of 1 is enough: a single queued notification already
        // guarantees the loop below will drain every pending update.
        let (notification_sender, notification_receiver) = crossbeam_channel::bounded(1);
        // Send a first notification to trigger the process.
        let _ = notification_sender.send(());

        let update_store = Arc::new(UpdateStore {
            env,
            pending,
            pending_meta,
            processed_meta,
            aborted_meta,
            notification_sender,
        });

        let update_store_cloned = update_store.clone();
        std::thread::spawn(move || {
            // Block and wait for something to process.
            for () in notification_receiver {
                // Drain every pending update before waiting again.
                loop {
                    match update_store_cloned.process_pending_update(&mut update_handler) {
                        Ok(Some(_)) => (),
                        Ok(None) => break,
                        // Errors are only logged; the loop keeps retrying the
                        // same update on the next iteration.
                        Err(e) => eprintln!("error while processing update: {}", e),
                    }
                }
            }
        });

        Ok(update_store)
    }

    /// Returns the new biggest id to use to store the new update.
    fn new_update_id(&self, txn: &heed::RoTxn) -> heed::Result<u64> {
        // Only keys are needed here, so values are decode-ignored. The next
        // id must be greater than every id ever used, in any of the three
        // meta stores, so processed/aborted ids are never reused.
        let last_pending = self.pending_meta
            .remap_data_type::<DecodeIgnore>()
            .last(txn)?
            .map(|(k, _)| k.get());

        let last_processed = self.processed_meta
            .remap_data_type::<DecodeIgnore>()
            .last(txn)?
            .map(|(k, _)| k.get());

        let last_aborted = self.aborted_meta
            .remap_data_type::<DecodeIgnore>()
            .last(txn)?
            .map(|(k, _)| k.get());

        let last_update_id = [last_pending, last_processed, last_aborted]
            .iter()
            .copied()
            .flatten()
            .max();

        match last_update_id {
            Some(last_id) => Ok(last_id + 1),
            None => Ok(0),
        }
    }

    /// Registers the update content in the pending store and the meta
    /// into the pending-meta store. Returns the new unique update id.
    pub fn register_update(&self, meta: &M, content: &[u8]) -> heed::Result<u64>
    where M: Serialize,
    {
        let mut wtxn = self.env.write_txn()?;

        // We ask the update store to give us a new update id, this is safe,
        // no other update can have the same id because we use a write txn before
        // asking for the id and registering it so other update registering
        // will be forced to wait for a new write txn.
        let update_id = self.new_update_id(&wtxn)?;
        let update_key = BEU64::new(update_id);

        self.pending_meta.put(&mut wtxn, &update_key, meta)?;
        self.pending.put(&mut wtxn, &update_key, content)?;

        wtxn.commit()?;

        // Wake the processing thread. A full channel is fine (a wake-up is
        // already queued); a disconnected one means the thread died, which
        // is a bug.
        if let Err(e) = self.notification_sender.try_send(()) {
            assert!(!e.is_disconnected(), "update notification channel is disconnected");
        }

        Ok(update_id)
    }

    /// Executes the user provided function on the next pending update (the one with the lowest id).
    /// This is asynchronous as it let the user process the update with a read-only txn and
    /// only writing the result meta to the processed-meta store *after* it has been processed.
    fn process_pending_update<U>(&self, handler: &mut U) -> heed::Result<Option<(u64, N)>>
    where
        U: UpdateHandler<M, N>,
        M: for<'a> Deserialize<'a>,
        N: Serialize,
    {
        // Create a read transaction to be able to retrieve the pending update in order.
        let rtxn = self.env.read_txn()?;
        let first_meta = self.pending_meta.first(&rtxn)?;

        // If there is a pending update we process and only keep
        // a reader while processing it, not a writer.
        match first_meta {
            Some((first_id, first_meta)) => {
                let first_content = self.pending
                    .get(&rtxn, &first_id)?
                    .expect("associated update content");

                // Process the pending update using the provided user function.
                let new_meta = handler.handle_update(first_id.get(), first_meta, first_content)?;
                drop(rtxn);

                // Once the pending update have been successfully processed
                // we must remove the content from the pending stores and
                // write the *new* meta to the processed-meta store and commit.
                let mut wtxn = self.env.write_txn()?;
                self.pending_meta.delete(&mut wtxn, &first_id)?;
                self.pending.delete(&mut wtxn, &first_id)?;
                self.processed_meta.put(&mut wtxn, &first_id, &new_meta)?;
                wtxn.commit()?;

                Ok(Some((first_id.get(), new_meta)))
            },
            None => Ok(None)
        }
    }

    /// The id and metadata of the update that is currently being processed,
    /// `None` if no update is being processed.
    pub fn processing_update(&self) -> heed::Result<Option<(u64, M)>>
    where M: for<'a> Deserialize<'a>,
    {
        // The first (lowest-id) pending entry is the one the background
        // thread picks up, so it is reported as "being processed".
        let rtxn = self.env.read_txn()?;
        match self.pending_meta.first(&rtxn)? {
            Some((key, meta)) => Ok(Some((key.get(), meta))),
            None => Ok(None),
        }
    }

    /// Execute the user defined function with the meta-store iterators, the first
    /// iterator is the *processed* meta one, the second the *aborted* meta one
    /// and, the last is the *pending* meta one.
    pub fn iter_metas<F, T>(&self, mut f: F) -> heed::Result<T>
    where
        M: for<'a> Deserialize<'a>,
        N: for<'a> Deserialize<'a>,
        F: for<'a> FnMut(
            heed::RoIter<'a, OwnedType<BEU64>, SerdeJson<N>>,
            heed::RoIter<'a, OwnedType<BEU64>, SerdeJson<M>>,
            heed::RoIter<'a, OwnedType<BEU64>, SerdeJson<M>>,
        ) -> heed::Result<T>,
    {
        let rtxn = self.env.read_txn()?;

        // We get the pending, processed and aborted meta iterators.
        let processed_iter = self.processed_meta.iter(&rtxn)?;
        let aborted_iter = self.aborted_meta.iter(&rtxn)?;
        let pending_iter = self.pending_meta.iter(&rtxn)?;

        // We execute the user defined function with both iterators.
        (f)(processed_iter, aborted_iter, pending_iter)
    }

    /// Returns the update associated meta or `None` if the update doesn't exist.
    pub fn meta(&self, update_id: u64) -> heed::Result<Option<UpdateStatusMeta<M, N>>>
    where
        M: for<'a> Deserialize<'a>,
        N: for<'a> Deserialize<'a>,
    {
        let rtxn = self.env.read_txn()?;
        let key = BEU64::new(update_id);

        // Probe the stores in pending -> processed -> aborted order; an id
        // lives in at most one of them.
        if let Some(meta) = self.pending_meta.get(&rtxn, &key)? {
            return Ok(Some(UpdateStatusMeta::Pending(meta)));
        }

        if let Some(meta) = self.processed_meta.get(&rtxn, &key)? {
            return Ok(Some(UpdateStatusMeta::Processed(meta)));
        }

        if let Some(meta) = self.aborted_meta.get(&rtxn, &key)? {
            return Ok(Some(UpdateStatusMeta::Aborted(meta)));
        }

        Ok(None)
    }

    /// Aborts an update, an aborted update content is deleted and
    /// the meta of it is moved into the aborted updates database.
    ///
    /// Trying to abort an update that is currently being processed, an update
    /// that as already been processed or which doesn't actually exist, will
    /// return `None`.
    pub fn abort_update(&self, update_id: u64) -> heed::Result<Option<M>>
    where M: Serialize + for<'a> Deserialize<'a>,
    {
        let mut wtxn = self.env.write_txn()?;
        let key = BEU64::new(update_id);

        // We cannot abort an update that is currently being processed.
        if self.pending_meta.first(&wtxn)?.map(|(key, _)| key.get()) == Some(update_id) {
            return Ok(None);
        }

        let meta = match self.pending_meta.get(&wtxn, &key)? {
            Some(meta) => meta,
            None => return Ok(None),
        };

        // Move the meta to the aborted store and drop the pending entries.
        self.aborted_meta.put(&mut wtxn, &key, &meta)?;
        self.pending_meta.delete(&mut wtxn, &key)?;
        self.pending.delete(&mut wtxn, &key)?;

        wtxn.commit()?;

        Ok(Some(meta))
    }

    /// Aborts all the pending updates, and not the one being currently processed.
    /// Returns the update metas and ids that were successfully aborted.
    pub fn abort_pendings(&self) -> heed::Result<Vec<(u64, M)>>
    where M: Serialize + for<'a> Deserialize<'a>,
    {
        let mut wtxn = self.env.write_txn()?;
        let mut aborted_updates = Vec::new();

        // We skip the first pending update as it is currently being processed.
        for result in self.pending_meta.iter(&wtxn)?.skip(1) {
            let (key, meta) = result?;
            let id = key.get();
            aborted_updates.push((id, meta));
        }

        for (id, meta) in &aborted_updates {
            let key = BEU64::new(*id);
            self.aborted_meta.put(&mut wtxn, &key, &meta)?;
            self.pending_meta.delete(&mut wtxn, &key)?;
            self.pending.delete(&mut wtxn, &key)?;
        }

        wtxn.commit()?;

        Ok(aborted_updates)
    }
}
|
||||
|
||||
/// The status of an update together with its meta, as returned by
/// [`UpdateStore::meta`].
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum UpdateStatusMeta<M, N> {
    /// Registered but not yet processed; carries the registered meta.
    Pending(M),
    /// Successfully processed; carries the meta produced by the handler.
    Processed(N),
    /// Aborted before processing; carries the registered meta.
    Aborted(M),
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    use std::thread;
    use std::time::{Duration, Instant};

    /// Registers a single update and checks that the background thread
    /// processes it and stores the handler-produced meta.
    #[test]
    fn simple() {
        let dir = tempfile::tempdir().unwrap();
        let options = EnvOpenOptions::new();
        let update_store = UpdateStore::open(options, dir, |_id, meta: String, _content: &_| {
            Ok(meta + " processed")
        }).unwrap();

        let meta = String::from("kiki");
        let update_id = update_store.register_update(&meta, &[]).unwrap();

        // Leave the background thread some time to process the update.
        thread::sleep(Duration::from_millis(100));

        let meta = update_store.meta(update_id).unwrap().unwrap();
        // `format!` with no interpolation was replaced by `.to_string()`
        // (clippy::useless_format).
        assert_eq!(meta, UpdateStatusMeta::Processed("kiki processed".to_string()));
    }

    /// Checks that `register_update` returns immediately even while the
    /// handler is busy, and that updates end up processed in order.
    #[test]
    #[ignore]
    fn long_running_update() {
        let dir = tempfile::tempdir().unwrap();
        let options = EnvOpenOptions::new();
        let update_store = UpdateStore::open(options, dir, |_id, meta: String, _content: &_| {
            // Simulate a long-running update.
            thread::sleep(Duration::from_millis(400));
            Ok(meta + " processed")
        }).unwrap();

        let before_register = Instant::now();

        let meta = String::from("kiki");
        let update_id_kiki = update_store.register_update(&meta, &[]).unwrap();
        assert!(before_register.elapsed() < Duration::from_millis(200));

        let meta = String::from("coco");
        let update_id_coco = update_store.register_update(&meta, &[]).unwrap();
        assert!(before_register.elapsed() < Duration::from_millis(200));

        let meta = String::from("cucu");
        let update_id_cucu = update_store.register_update(&meta, &[]).unwrap();
        assert!(before_register.elapsed() < Duration::from_millis(200));

        // Wait for the three 400 ms updates plus a safety margin.
        thread::sleep(Duration::from_millis(400 * 3 + 100));

        let meta = update_store.meta(update_id_kiki).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed("kiki processed".to_string()));

        let meta = update_store.meta(update_id_coco).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed("coco processed".to_string()));

        let meta = update_store.meta(update_id_cucu).unwrap().unwrap();
        assert_eq!(meta, UpdateStatusMeta::Processed("cucu processed".to_string()));
    }
}
|
17
milli/tests/assets/test_set.ndjson
Normal file
17
milli/tests/assets/test_set.ndjson
Normal file
@ -0,0 +1,17 @@
|
||||
{"id":"A","word_rank":0,"typo_rank":1,"proximity_rank":15,"attribute_rank":505,"exact_rank":5,"asc_desc_rank":0,"title":"hell o","description":"hell o is the fourteenth episode of the american television series glee performing songs with this word","tag":"blue","":""}
|
||||
{"id":"B","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":4,"asc_desc_rank":1,"title":"hello","description":"hello is a song recorded by english singer songwriter adele","tag":"red","":""}
|
||||
{"id":"C","word_rank":0,"typo_rank":1,"proximity_rank":8,"attribute_rank":336,"exact_rank":4,"asc_desc_rank":2,"title":"hell on earth","description":"hell on earth is the third studio album by american hip hop duo mobb deep","tag":"blue","":""}
|
||||
{"id":"D","word_rank":0,"typo_rank":1,"proximity_rank":10,"attribute_rank":757,"exact_rank":4,"asc_desc_rank":3,"title":"hell on wheels tv series","description":"the construction of the first transcontinental railroad across the united states in the world","tag":"red","":""}
|
||||
{"id":"E","word_rank":2,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":1,"asc_desc_rank":4,"title":"hello kitty","description":"also known by her full name kitty white is a fictional character produced by the japanese company sanrio","tag":"green","":""}
|
||||
{"id":"F","word_rank":2,"typo_rank":1,"proximity_rank":0,"attribute_rank":1017,"exact_rank":5,"asc_desc_rank":5,"title":"laptop orchestra","description":"a laptop orchestra lork or lo is a chamber music ensemble consisting primarily of laptops like helo huddersfield experimental laptop orchestra","tag":"blue","":""}
|
||||
{"id":"G","word_rank":1,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":5,"title":"hello world film","description":"hello world is a 2019 japanese animated sci fi romantic drama film directed by tomohiko ito and produced by graphinica","tag":"red","":""}
|
||||
{"id":"H","word_rank":1,"typo_rank":0,"proximity_rank":1,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":4,"title":"world hello day","description":"holiday observed on november 21 to express that conflicts should be resolved through communication rather than the use of force","tag":"green","":""}
|
||||
{"id":"I","word_rank":0,"typo_rank":0,"proximity_rank":8,"attribute_rank":338,"exact_rank":3,"asc_desc_rank":3,"title":"hello world song","description":"hello world is a song written by tom douglas tony lane and david lee and recorded by american country music group lady antebellum","tag":"blue","":""}
|
||||
{"id":"J","word_rank":1,"typo_rank":0,"proximity_rank":1,"attribute_rank":1,"exact_rank":3,"asc_desc_rank":2,"title":"hello cruel world","description":"hello cruel world is an album by new zealand band tall dwarfs","tag":"green","":""}
|
||||
{"id":"K","word_rank":0,"typo_rank":2,"proximity_rank":9,"attribute_rank":670,"exact_rank":5,"asc_desc_rank":1,"title":"ello creation system","description":"in few word ello was a construction toy created by the american company mattel to engage girls in construction play","tag":"red","":""}
|
||||
{"id":"L","word_rank":0,"typo_rank":0,"proximity_rank":2,"attribute_rank":250,"exact_rank":4,"asc_desc_rank":0,"title":"good morning world","description":"good morning world is an american sitcom broadcast on cbs tv during the 1967 1968 season","tag":"blue","":""}
|
||||
{"id":"M","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":0,"asc_desc_rank":0,"title":"hello world america","description":"a perfect match for a perfect engine using the query hello world america","tag":"red","":""}
|
||||
{"id":"N","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":1,"asc_desc_rank":4,"title":"hello world america unleashed","description":"a very good match for a very good engine using the query hello world america","tag":"green","":""}
|
||||
{"id":"O","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":10,"exact_rank":0,"asc_desc_rank":6,"title":"a perfect match for a perfect engine using the query hello world america","description":"hello world america","tag":"blue","":""}
|
||||
{"id":"P","word_rank":0,"typo_rank":0,"proximity_rank":0,"attribute_rank":12,"exact_rank":1,"asc_desc_rank":3,"title":"a very good match for a very good engine using the query hello world america","description":"hello world america unleashed","tag":"red","":""}
|
||||
{"id":"Q","word_rank":1,"typo_rank":0,"proximity_rank":0,"attribute_rank":0,"exact_rank":3,"asc_desc_rank":2,"title":"hello world","description":"a hello world program generally is a computer program that outputs or displays the message hello world","tag":"green","":""}
|
1
milli/tests/mod.rs
Normal file
1
milli/tests/mod.rs
Normal file
@ -0,0 +1 @@
|
||||
mod search;
|
125
milli/tests/search/mod.rs
Normal file
125
milli/tests/search/mod.rs
Normal file
@ -0,0 +1,125 @@
|
||||
use milli::{Criterion, Index, DocumentId};
|
||||
use milli::update::{IndexDocuments, UpdateFormat, Settings};
|
||||
|
||||
use big_s::S;
|
||||
use heed::EnvOpenOptions;
|
||||
use maplit::{hashmap, hashset};
|
||||
use serde::Deserialize;
|
||||
use slice_group_by::GroupBy;
|
||||
|
||||
mod query_criteria;
|
||||
|
||||
pub const TEST_QUERY: &'static str = "hello world america";
|
||||
|
||||
pub const EXTERNAL_DOCUMENTS_IDS: &[&str; 17] = &["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q"];
|
||||
|
||||
pub const CONTENT: &str = include_str!("../assets/test_set.ndjson");
|
||||
|
||||
/// Builds a fresh temporary `Index` configured with the given ranking
/// `criteria`, the test filterable/searchable fields and synonyms, then
/// indexes the NDJSON test dataset into it.
pub fn setup_search_index_with_criteria(criteria: &[Criterion]) -> Index {
    let path = tempfile::tempdir().unwrap();
    let mut options = EnvOpenOptions::new();
    options.map_size(10 * 1024 * 1024); // 10 MB
    let index = Index::new(options, &path).unwrap();

    // Settings and documents are applied under a single write transaction.
    let mut wtxn = index.write_txn().unwrap();

    let mut builder = Settings::new(&mut wtxn, &index, 0);

    // Criteria are passed to the settings builder by name.
    let criteria = criteria.iter().map(|c| c.to_string()).collect();
    builder.set_criteria(criteria);
    builder.set_filterable_fields(hashset!{
        S("tag"),
        S("asc_desc_rank"),
    });
    // Synonyms exercised by the "hello world america" test query.
    builder.set_synonyms(hashmap!{
        S("hello") => vec![S("good morning")],
        S("world") => vec![S("earth")],
        S("america") => vec![S("the united states")],
    });
    builder.set_searchable_fields(vec![S("title"),S("description")]);
    builder.execute(|_, _| ()).unwrap();

    // index documents
    let mut builder = IndexDocuments::new(&mut wtxn, &index, 0);
    builder.update_format(UpdateFormat::JsonStream);
    builder.enable_autogenerate_docids();
    builder.execute(CONTENT.as_bytes(), |_, _| ()).unwrap();

    wtxn.commit().unwrap();

    index
}
|
||||
|
||||
/// Maps internal `DocumentId`s back to their external string ids ("A".."Q")
/// by inverting the index's external->internal id map.
///
/// Panics if an internal id does not belong to the known test dataset.
pub fn internal_to_external_ids(index: &Index, internal_ids: &[DocumentId]) -> Vec<String> {
    // NOTE(review): `external_documents_ids` is called with `&mut rtxn` here,
    // hence the `mut` binding — presumably required by this milli API version;
    // confirm against the `Index` definition.
    let mut rtxn = index.read_txn().unwrap();
    let docid_map = index.external_documents_ids(&mut rtxn).unwrap();
    // Build the inverse map: internal id -> external id.
    let docid_map: std::collections::HashMap<_, _> = EXTERNAL_DOCUMENTS_IDS.iter().map(|id| (docid_map.get(id).unwrap(), id)).collect();
    internal_ids.iter().map(|id| docid_map.get(id).unwrap().to_string()).collect()
}
|
||||
|
||||
/// Computes, independently of the search engine, the expected document order
/// for `criteria`.
///
/// All documents start in a single group; each criterion in turn sorts every
/// group by its pre-computed rank field and splits it into subgroups of equal
/// rank, preserving the relative order established by earlier criteria.
/// Finally, documents that could not match at all under the given
/// typo/optional-words flags are filtered out.
pub fn expected_order(criteria: &[Criterion], authorize_typo: bool, optional_words: bool) -> Vec<TestDocument> {
    let dataset = serde_json::Deserializer::from_str(CONTENT).into_iter().map(|r| r.unwrap()).collect();
    let mut groups: Vec<Vec<TestDocument>> = vec![dataset];

    for criterion in criteria {
        let mut new_groups = Vec::new();
        for group in groups.iter_mut() {
            match criterion {
                Criterion::Attribute => {
                    group.sort_by_key(|d| d.attribute_rank);
                    new_groups.extend(group.linear_group_by_key(|d| d.attribute_rank).map(Vec::from));
                },
                Criterion::Exactness => {
                    group.sort_by_key(|d| d.exact_rank);
                    new_groups.extend(group.linear_group_by_key(|d| d.exact_rank).map(Vec::from));
                },
                Criterion::Proximity => {
                    group.sort_by_key(|d| d.proximity_rank);
                    new_groups.extend(group.linear_group_by_key(|d| d.proximity_rank).map(Vec::from));
                },
                Criterion::Typo => {
                    group.sort_by_key(|d| d.typo_rank);
                    new_groups.extend(group.linear_group_by_key(|d| d.typo_rank).map(Vec::from));
                },
                Criterion::Words => {
                    group.sort_by_key(|d| d.word_rank);
                    new_groups.extend(group.linear_group_by_key(|d| d.word_rank).map(Vec::from));
                },
                Criterion::Asc(field_name) if field_name == "asc_desc_rank" => {
                    group.sort_by_key(|d| d.asc_desc_rank);
                    new_groups.extend(group.linear_group_by_key(|d| d.asc_desc_rank).map(Vec::from));
                },
                Criterion::Desc(field_name) if field_name == "asc_desc_rank" => {
                    group.sort_by_key(|d| std::cmp::Reverse(d.asc_desc_rank));
                    // Grouping on the plain key still works: sorting made
                    // equal ranks adjacent.
                    new_groups.extend(group.linear_group_by_key(|d| d.asc_desc_rank).map(Vec::from));
                },
                // Asc/Desc on any other field does not discriminate the test
                // documents: the group is kept as a single block.
                Criterion::Asc(_) | Criterion::Desc(_) => new_groups.push(group.clone()),
            }
        }
        groups = std::mem::take(&mut new_groups);
    }

    if authorize_typo && optional_words {
        groups.into_iter().flatten().collect()
    } else if optional_words {
        // Typos disabled: only documents reachable without a typo can match.
        groups.into_iter().flatten().filter(|d| d.typo_rank == 0).collect()
    } else if authorize_typo {
        // Optional words disabled: every query word must be present.
        groups.into_iter().flatten().filter(|d| d.word_rank == 0).collect()
    } else {
        groups.into_iter().flatten().filter(|d| d.word_rank == 0 && d.typo_rank == 0).collect()
    }
}
|
||||
|
||||
/// One document of the pre-ranked test dataset.
///
/// Each `*_rank` field is the rank this document is expected to get from the
/// corresponding criterion; `expected_order` sorts on them to predict the
/// engine's output.
#[derive(Debug, Clone, Deserialize)]
pub struct TestDocument {
    /// External document id ("A".."Q").
    pub id: String,
    pub word_rank: u32,
    pub typo_rank: u32,
    pub proximity_rank: u32,
    pub attribute_rank: u32,
    pub exact_rank: u32,
    /// Shared rank used by both the Asc and Desc criteria tests.
    pub asc_desc_rank: u32,
    pub title: String,
    pub description: String,
    pub tag: String,
}
|
213
milli/tests/search/query_criteria.rs
Normal file
213
milli/tests/search/query_criteria.rs
Normal file
@ -0,0 +1,213 @@
|
||||
use milli::{Search, SearchResult, Criterion};
|
||||
use big_s::S;
|
||||
|
||||
use crate::search::{self, EXTERNAL_DOCUMENTS_IDS};
|
||||
use Criterion::*;
|
||||
|
||||
// Readable aliases for the two boolean flags taken by `test_criterion!`,
// so each test invocation documents its typo/optional-words policy.
const ALLOW_TYPOS: bool = true;
const DISALLOW_TYPOS: bool = false;
const ALLOW_OPTIONAL_WORDS: bool = true;
const DISALLOW_OPTIONAL_WORDS: bool = false;
|
||||
|
||||
/// Generates a `#[test]` named `$func` that:
/// 1. builds an index ranked by `$criterion` (or by no criterion at all when
///    the optional argument is omitted),
/// 2. runs `TEST_QUERY` with the given optional-words/typo flags,
/// 3. compares the returned document order against `expected_order`.
macro_rules! test_criterion {
    ($func:ident, $optional_word:ident, $authorize_typos:ident $(, $criterion:expr)?) => {
        #[test]
        fn $func() {
            let criteria = vec![$($criterion)?];
            let index = search::setup_search_index_with_criteria(&criteria);
            let mut rtxn = index.read_txn().unwrap();

            let mut search = Search::new(&mut rtxn, &index);
            search.query(search::TEST_QUERY);
            // Ask for every document so the full expected order is compared.
            search.limit(EXTERNAL_DOCUMENTS_IDS.len());
            search.authorize_typos($authorize_typos);
            search.optional_words($optional_word);

            let SearchResult { documents_ids, .. } = search.execute().unwrap();

            let expected_external_ids: Vec<_> = search::expected_order(&criteria, $authorize_typos, $optional_word)
                .into_iter()
                .map(|d| d.id).collect();
            let documents_ids = search::internal_to_external_ids(&index, &documents_ids);
            assert_eq!(documents_ids, expected_external_ids);
        }
    }
}
|
||||
|
||||
// One generated test per criterion and typo policy, plus the no-criterion
// baselines and the Asc/Desc cases on a field absent from the dataset.
test_criterion!(none_allow_typo, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS);
test_criterion!(none_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS);
test_criterion!(words_allow_typo, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Words);
test_criterion!(attribute_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Attribute);
test_criterion!(attribute_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Attribute);
test_criterion!(exactness_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Exactness);
test_criterion!(exactness_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Exactness);
test_criterion!(proximity_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Proximity);
test_criterion!(proximity_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Proximity);
test_criterion!(asc_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Asc(S("asc_desc_rank")));
test_criterion!(asc_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Asc(S("asc_desc_rank")));
test_criterion!(desc_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Desc(S("asc_desc_rank")));
test_criterion!(desc_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Desc(S("asc_desc_rank")));
test_criterion!(asc_unexisting_field_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Asc(S("unexisting_field")));
test_criterion!(asc_unexisting_field_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Asc(S("unexisting_field")));
test_criterion!(desc_unexisting_field_allow_typo, DISALLOW_OPTIONAL_WORDS, ALLOW_TYPOS, Desc(S("unexisting_field")));
test_criterion!(desc_unexisting_field_disallow_typo, DISALLOW_OPTIONAL_WORDS, DISALLOW_TYPOS, Desc(S("unexisting_field")));
|
||||
|
||||
/// Checks that search results match `search::expected_order` for every
/// permutation of the criteria following the fixed `Words` head.
///
/// Fix: the `[Words, Typo, desc(), ..]` group previously listed
/// `[.., Exactness, Attribute, Proximity]` twice and was missing
/// `[.., Attribute, Exactness, Proximity]`, so only 119 of the 120
/// permutations were actually covered.
#[test]
fn criteria_mixup() {
    use Criterion::*;
    // Index configured with every criterion that appears in the mixes below.
    let index = search::setup_search_index_with_criteria(&vec![Words, Attribute, Desc(S("asc_desc_rank")), Exactness, Proximity, Typo]);

    let criteria_mix = {
        // Criterion doesn't implement Copy, we create a new Criterion using a closure
        let desc = || Desc(S("asc_desc_rank"));
        // all possible criteria orders: the 5! = 120 permutations of
        // {Attribute, desc, Exactness, Proximity, Typo} after `Words`
        vec![
            vec![Words, Attribute, desc(), Exactness, Proximity, Typo],
            vec![Words, Attribute, desc(), Exactness, Typo, Proximity],
            vec![Words, Attribute, desc(), Proximity, Exactness, Typo],
            vec![Words, Attribute, desc(), Proximity, Typo, Exactness],
            vec![Words, Attribute, desc(), Typo, Exactness, Proximity],
            vec![Words, Attribute, desc(), Typo, Proximity, Exactness],
            vec![Words, Attribute, Exactness, desc(), Proximity, Typo],
            vec![Words, Attribute, Exactness, desc(), Typo, Proximity],
            vec![Words, Attribute, Exactness, Proximity, desc(), Typo],
            vec![Words, Attribute, Exactness, Proximity, Typo, desc()],
            vec![Words, Attribute, Exactness, Typo, desc(), Proximity],
            vec![Words, Attribute, Exactness, Typo, Proximity, desc()],
            vec![Words, Attribute, Proximity, desc(), Exactness, Typo],
            vec![Words, Attribute, Proximity, desc(), Typo, Exactness],
            vec![Words, Attribute, Proximity, Exactness, desc(), Typo],
            vec![Words, Attribute, Proximity, Exactness, Typo, desc()],
            vec![Words, Attribute, Proximity, Typo, desc(), Exactness],
            vec![Words, Attribute, Proximity, Typo, Exactness, desc()],
            vec![Words, Attribute, Typo, desc(), Exactness, Proximity],
            vec![Words, Attribute, Typo, desc(), Proximity, Exactness],
            vec![Words, Attribute, Typo, Exactness, desc(), Proximity],
            vec![Words, Attribute, Typo, Exactness, Proximity, desc()],
            vec![Words, Attribute, Typo, Proximity, desc(), Exactness],
            vec![Words, Attribute, Typo, Proximity, Exactness, desc()],
            vec![Words, desc(), Attribute, Exactness, Proximity, Typo],
            vec![Words, desc(), Attribute, Exactness, Typo, Proximity],
            vec![Words, desc(), Attribute, Proximity, Exactness, Typo],
            vec![Words, desc(), Attribute, Proximity, Typo, Exactness],
            vec![Words, desc(), Attribute, Typo, Exactness, Proximity],
            vec![Words, desc(), Attribute, Typo, Proximity, Exactness],
            vec![Words, desc(), Exactness, Attribute, Proximity, Typo],
            vec![Words, desc(), Exactness, Attribute, Typo, Proximity],
            vec![Words, desc(), Exactness, Proximity, Attribute, Typo],
            vec![Words, desc(), Exactness, Proximity, Typo, Attribute],
            vec![Words, desc(), Exactness, Typo, Attribute, Proximity],
            vec![Words, desc(), Exactness, Typo, Proximity, Attribute],
            vec![Words, desc(), Proximity, Attribute, Exactness, Typo],
            vec![Words, desc(), Proximity, Attribute, Typo, Exactness],
            vec![Words, desc(), Proximity, Exactness, Attribute, Typo],
            vec![Words, desc(), Proximity, Exactness, Typo, Attribute],
            vec![Words, desc(), Proximity, Typo, Attribute, Exactness],
            vec![Words, desc(), Proximity, Typo, Exactness, Attribute],
            vec![Words, desc(), Typo, Attribute, Exactness, Proximity],
            vec![Words, desc(), Typo, Attribute, Proximity, Exactness],
            vec![Words, desc(), Typo, Exactness, Attribute, Proximity],
            vec![Words, desc(), Typo, Exactness, Proximity, Attribute],
            vec![Words, desc(), Typo, Proximity, Attribute, Exactness],
            vec![Words, desc(), Typo, Proximity, Exactness, Attribute],
            vec![Words, Exactness, Attribute, desc(), Proximity, Typo],
            vec![Words, Exactness, Attribute, desc(), Typo, Proximity],
            vec![Words, Exactness, Attribute, Proximity, desc(), Typo],
            vec![Words, Exactness, Attribute, Proximity, Typo, desc()],
            vec![Words, Exactness, Attribute, Typo, desc(), Proximity],
            vec![Words, Exactness, Attribute, Typo, Proximity, desc()],
            vec![Words, Exactness, desc(), Attribute, Proximity, Typo],
            vec![Words, Exactness, desc(), Attribute, Typo, Proximity],
            vec![Words, Exactness, desc(), Proximity, Attribute, Typo],
            vec![Words, Exactness, desc(), Proximity, Typo, Attribute],
            vec![Words, Exactness, desc(), Typo, Attribute, Proximity],
            vec![Words, Exactness, desc(), Typo, Proximity, Attribute],
            vec![Words, Exactness, Proximity, Attribute, desc(), Typo],
            vec![Words, Exactness, Proximity, Attribute, Typo, desc()],
            vec![Words, Exactness, Proximity, desc(), Attribute, Typo],
            vec![Words, Exactness, Proximity, desc(), Typo, Attribute],
            vec![Words, Exactness, Proximity, Typo, Attribute, desc()],
            vec![Words, Exactness, Proximity, Typo, desc(), Attribute],
            vec![Words, Exactness, Typo, Attribute, desc(), Proximity],
            vec![Words, Exactness, Typo, Attribute, Proximity, desc()],
            vec![Words, Exactness, Typo, desc(), Attribute, Proximity],
            vec![Words, Exactness, Typo, desc(), Proximity, Attribute],
            vec![Words, Exactness, Typo, Proximity, Attribute, desc()],
            vec![Words, Exactness, Typo, Proximity, desc(), Attribute],
            vec![Words, Proximity, Attribute, desc(), Exactness, Typo],
            vec![Words, Proximity, Attribute, desc(), Typo, Exactness],
            vec![Words, Proximity, Attribute, Exactness, desc(), Typo],
            vec![Words, Proximity, Attribute, Exactness, Typo, desc()],
            vec![Words, Proximity, Attribute, Typo, desc(), Exactness],
            vec![Words, Proximity, Attribute, Typo, Exactness, desc()],
            vec![Words, Proximity, desc(), Attribute, Exactness, Typo],
            vec![Words, Proximity, desc(), Attribute, Typo, Exactness],
            vec![Words, Proximity, desc(), Exactness, Attribute, Typo],
            vec![Words, Proximity, desc(), Exactness, Typo, Attribute],
            vec![Words, Proximity, desc(), Typo, Attribute, Exactness],
            vec![Words, Proximity, desc(), Typo, Exactness, Attribute],
            vec![Words, Proximity, Exactness, Attribute, desc(), Typo],
            vec![Words, Proximity, Exactness, Attribute, Typo, desc()],
            vec![Words, Proximity, Exactness, desc(), Attribute, Typo],
            vec![Words, Proximity, Exactness, desc(), Typo, Attribute],
            vec![Words, Proximity, Exactness, Typo, Attribute, desc()],
            vec![Words, Proximity, Exactness, Typo, desc(), Attribute],
            vec![Words, Proximity, Typo, Attribute, desc(), Exactness],
            vec![Words, Proximity, Typo, Attribute, Exactness, desc()],
            vec![Words, Proximity, Typo, desc(), Attribute, Exactness],
            vec![Words, Proximity, Typo, desc(), Exactness, Attribute],
            vec![Words, Proximity, Typo, Exactness, Attribute, desc()],
            vec![Words, Proximity, Typo, Exactness, desc(), Attribute],
            vec![Words, Typo, Attribute, desc(), Exactness, Proximity],
            vec![Words, Typo, Attribute, desc(), Proximity, Exactness],
            vec![Words, Typo, Attribute, Exactness, desc(), Proximity],
            vec![Words, Typo, Attribute, Exactness, Proximity, desc()],
            vec![Words, Typo, Attribute, Proximity, desc(), Exactness],
            vec![Words, Typo, Attribute, Proximity, Exactness, desc()],
            // This sub-group previously duplicated the `Exactness, Attribute,
            // Proximity` tail and lacked `Attribute, Exactness, Proximity`.
            vec![Words, Typo, desc(), Attribute, Exactness, Proximity],
            vec![Words, Typo, desc(), Attribute, Proximity, Exactness],
            vec![Words, Typo, desc(), Exactness, Attribute, Proximity],
            vec![Words, Typo, desc(), Exactness, Proximity, Attribute],
            vec![Words, Typo, desc(), Proximity, Attribute, Exactness],
            vec![Words, Typo, desc(), Proximity, Exactness, Attribute],
            vec![Words, Typo, Exactness, Attribute, desc(), Proximity],
            vec![Words, Typo, Exactness, Attribute, Proximity, desc()],
            vec![Words, Typo, Exactness, desc(), Attribute, Proximity],
            vec![Words, Typo, Exactness, desc(), Proximity, Attribute],
            vec![Words, Typo, Exactness, Proximity, Attribute, desc()],
            vec![Words, Typo, Exactness, Proximity, desc(), Attribute],
            vec![Words, Typo, Proximity, Attribute, desc(), Exactness],
            vec![Words, Typo, Proximity, Attribute, Exactness, desc()],
            vec![Words, Typo, Proximity, desc(), Attribute, Exactness],
            vec![Words, Typo, Proximity, desc(), Exactness, Attribute],
            vec![Words, Typo, Proximity, Exactness, Attribute, desc()],
            vec![Words, Typo, Proximity, Exactness, desc(), Attribute],
        ]
    };

    for criteria in criteria_mix {
        // Print the order under test so a failing permutation is identifiable.
        eprintln!("Testing with criteria order: {:?}", &criteria);
        // Update the index settings with this criteria order.
        let mut wtxn = index.write_txn().unwrap();
        index.put_criteria(&mut wtxn, &criteria).unwrap();
        wtxn.commit().unwrap();

        // A shared borrow is enough for searching; no `mut` needed.
        let rtxn = index.read_txn().unwrap();

        let mut search = Search::new(&rtxn, &index);
        search.query(search::TEST_QUERY);
        // Large enough limit to retrieve every document in the dataset.
        search.limit(EXTERNAL_DOCUMENTS_IDS.len());
        search.optional_words(ALLOW_OPTIONAL_WORDS);
        search.authorize_typos(ALLOW_TYPOS);

        let SearchResult { documents_ids, .. } = search.execute().unwrap();

        // Compare the returned external ids to the dataset-driven expectation.
        let expected_external_ids: Vec<_> = search::expected_order(&criteria, ALLOW_OPTIONAL_WORDS, ALLOW_TYPOS)
            .into_iter()
            .map(|d| d.id)
            .collect();
        let documents_ids = search::internal_to_external_ids(&index, &documents_ids);

        assert_eq!(documents_ids, expected_external_ids);
    }
}
|
Reference in New Issue
Block a user