Mirror of https://github.com/meilisearch/meilisearch.git
Synced 2025-07-20 05:20:36 +00:00

Comparing commits: add-socket...v1.2.1 (6 commits)

Commits compared:
541f36f150
2576c0c462
eac880118a
75e4fe4e55
260633b662
1fb7782777

Cargo.lock (generated): 26 changed lines

@@ -463,7 +463,7 @@ checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf"
 
 [[package]]
 name = "benchmarks"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "anyhow",
  "bytes",
@@ -1209,7 +1209,7 @@ dependencies = [
 
 [[package]]
 name = "dump"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "anyhow",
  "big_s",
@@ -1428,7 +1428,7 @@ dependencies = [
 
 [[package]]
 name = "file-store"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "faux",
  "tempfile",
@@ -1450,7 +1450,7 @@ dependencies = [
 
 [[package]]
 name = "filter-parser"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "insta",
  "nom",
@@ -1476,7 +1476,7 @@ dependencies = [
 
 [[package]]
 name = "flatten-serde-json"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "criterion",
  "serde_json",
@@ -1959,7 +1959,7 @@ dependencies = [
 
 [[package]]
 name = "index-scheduler"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "anyhow",
  "big_s",
@@ -2113,7 +2113,7 @@ dependencies = [
 
 [[package]]
 name = "json-depth-checker"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "criterion",
  "serde_json",
@@ -2539,7 +2539,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
 
 [[package]]
 name = "meili-snap"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "insta",
  "md5",
@@ -2548,7 +2548,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -2636,7 +2636,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-auth"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "base64 0.21.0",
  "enum-iterator",
@@ -2655,7 +2655,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-types"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -2709,7 +2709,7 @@ dependencies = [
 
 [[package]]
 name = "milli"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "big_s",
  "bimap",
@@ -3064,7 +3064,7 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
 
 [[package]]
 name = "permissive-json-pointer"
-version = "1.2.0"
+version = "1.2.1"
 dependencies = [
  "big_s",
  "serde_json",

Cargo.toml (workspace manifest):

@@ -17,7 +17,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.2.0"
+version = "1.2.1"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"

Index scheduler, batch handling:

@@ -67,10 +67,6 @@ pub(crate) enum Batch {
         op: IndexOperation,
         must_create_index: bool,
     },
-    IndexDocumentDeletionByFilter {
-        index_uid: String,
-        task: Task,
-    },
     IndexCreation {
         index_uid: String,
         primary_key: Option<String>,
@@ -114,6 +110,10 @@ pub(crate) enum IndexOperation {
         documents: Vec<Vec<String>>,
         tasks: Vec<Task>,
     },
+    IndexDocumentDeletionByFilter {
+        index_uid: String,
+        task: Task,
+    },
     DocumentClear {
         index_uid: String,
         tasks: Vec<Task>,
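
The two hunks above move the `IndexDocumentDeletionByFilter` variant out of the `Batch` enum and into `IndexOperation`, so deletion-by-filter now flows through the code path that already owns the target index and its write transaction. A minimal sketch of the resulting shape, using simplified stand-in types rather than Meilisearch's actual definitions:

    struct Task {
        uid: u32,
    }

    enum IndexOperation {
        // Moved here from `Batch` by this change; all other variants elided.
        IndexDocumentDeletionByFilter { index_uid: String, task: Task },
    }

    enum Batch {
        // `IndexDocumentDeletionByFilter` used to sit here as a sibling
        // variant; all other batch kinds elided.
        IndexOperation { op: IndexOperation, must_create_index: bool },
    }

    fn main() {
        let batch = Batch::IndexOperation {
            op: IndexOperation::IndexDocumentDeletionByFilter {
                index_uid: "movies".to_string(),
                task: Task { uid: 0 },
            },
            must_create_index: false,
        };
        // With a single code path, the scheduler can hand every per-index
        // operation the same open index and write transaction.
        let Batch::IndexOperation { op, must_create_index } = batch;
        let IndexOperation::IndexDocumentDeletionByFilter { index_uid, task } = op;
        println!("task {} deletes by filter on `{}` (create: {})", task.uid, index_uid, must_create_index);
    }
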
@@ -155,7 +155,6 @@ impl Batch {
             | Batch::TaskDeletion(task)
             | Batch::Dump(task)
             | Batch::IndexCreation { task, .. }
-            | Batch::IndexDocumentDeletionByFilter { task, .. }
             | Batch::IndexUpdate { task, .. } => vec![task.uid],
             Batch::SnapshotCreation(tasks) | Batch::IndexDeletion { tasks, .. } => {
                 tasks.iter().map(|task| task.uid).collect()
@@ -167,6 +166,7 @@ impl Batch {
             | IndexOperation::DocumentClear { tasks, .. } => {
                 tasks.iter().map(|task| task.uid).collect()
             }
+            IndexOperation::IndexDocumentDeletionByFilter { task, .. } => vec![task.uid],
             IndexOperation::SettingsAndDocumentOperation {
                 document_import_tasks: tasks,
                 settings_tasks: other,
@@ -194,8 +194,7 @@ impl Batch {
             IndexOperation { op, .. } => Some(op.index_uid()),
             IndexCreation { index_uid, .. }
             | IndexUpdate { index_uid, .. }
-            | IndexDeletion { index_uid, .. }
-            | IndexDocumentDeletionByFilter { index_uid, .. } => Some(index_uid),
+            | IndexDeletion { index_uid, .. } => Some(index_uid),
         }
     }
 }
@@ -205,6 +204,7 @@ impl IndexOperation {
         match self {
             IndexOperation::DocumentOperation { index_uid, .. }
             | IndexOperation::DocumentDeletion { index_uid, .. }
+            | IndexOperation::IndexDocumentDeletionByFilter { index_uid, .. }
             | IndexOperation::DocumentClear { index_uid, .. }
             | IndexOperation::Settings { index_uid, .. }
             | IndexOperation::DocumentClearAndSetting { index_uid, .. }
@@ -239,9 +239,12 @@ impl IndexScheduler {
                 let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
                 match &task.kind {
                     KindWithContent::DocumentDeletionByFilter { index_uid, .. } => {
-                        Ok(Some(Batch::IndexDocumentDeletionByFilter {
-                            index_uid: index_uid.clone(),
-                            task,
+                        Ok(Some(Batch::IndexOperation {
+                            op: IndexOperation::IndexDocumentDeletionByFilter {
+                                index_uid: index_uid.clone(),
+                                task,
+                            },
+                            must_create_index: false,
                         }))
                     }
                     _ => unreachable!(),
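
Here the batch builder wraps the deletion task in `Batch::IndexOperation` with `must_create_index: false`. A hedged sketch of what that flag conveys, assuming the usual Meilisearch semantics (a pure deletion should fail on a missing index rather than implicitly create one), with deliberately simplified control flow:

    fn resolve_index(exists: bool, must_create_index: bool) -> Result<&'static str, &'static str> {
        match (exists, must_create_index) {
            (true, _) => Ok("open the existing index"),
            (false, true) => Ok("create the index, then open it"),
            (false, false) => Err("index_not_found"),
        }
    }

    fn main() {
        // Deletion-by-filter batches are built with `must_create_index: false`,
        // so a missing index surfaces as an error:
        assert_eq!(resolve_index(false, false), Err("index_not_found"));
        // while operations allowed to create their target index proceed:
        assert_eq!(resolve_index(false, true), Ok("create the index, then open it"));
    }
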
@@ -887,51 +890,6 @@ impl IndexScheduler {
 
                 Ok(tasks)
             }
-            Batch::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
-                let (index_uid, filter) =
-                    if let KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } =
-                        &task.kind
-                    {
-                        (index_uid, filter_expr)
-                    } else {
-                        unreachable!()
-                    };
-                let index = {
-                    let rtxn = self.env.read_txn()?;
-                    self.index_mapper.index(&rtxn, index_uid)?
-                };
-                let deleted_documents = delete_document_by_filter(filter, index);
-                let original_filter = if let Some(Details::DocumentDeletionByFilter {
-                    original_filter,
-                    deleted_documents: _,
-                }) = task.details
-                {
-                    original_filter
-                } else {
-                    // In the case of a `documentDeleteByFilter` the details MUST be set
-                    unreachable!();
-                };
-
-                match deleted_documents {
-                    Ok(deleted_documents) => {
-                        task.status = Status::Succeeded;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(deleted_documents),
-                        });
-                    }
-                    Err(e) => {
-                        task.status = Status::Failed;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(0),
-                        });
-                        task.error = Some(e.into());
-                    }
-                }
-
-                Ok(vec![task])
-            }
             Batch::IndexCreation { index_uid, primary_key, task } => {
                 let wtxn = self.env.write_txn()?;
                 if self.index_mapper.exists(&wtxn, &index_uid)? {
@@ -1288,6 +1246,47 @@ impl IndexScheduler {
 
                 Ok(tasks)
             }
+            IndexOperation::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
+                let filter =
+                    if let KindWithContent::DocumentDeletionByFilter { filter_expr, .. } =
+                        &task.kind
+                    {
+                        filter_expr
+                    } else {
+                        unreachable!()
+                    };
+                let deleted_documents = delete_document_by_filter(index_wtxn, filter, index);
+                let original_filter = if let Some(Details::DocumentDeletionByFilter {
+                    original_filter,
+                    deleted_documents: _,
+                }) = task.details
+                {
+                    original_filter
+                } else {
+                    // In the case of a `documentDeleteByFilter` the details MUST be set
+                    unreachable!();
+                };
+
+                match deleted_documents {
+                    Ok(deleted_documents) => {
+                        task.status = Status::Succeeded;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(deleted_documents),
+                        });
+                    }
+                    Err(e) => {
+                        task.status = Status::Failed;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(0),
+                        });
+                        task.error = Some(e.into());
+                    }
+                }
+
+                Ok(vec![task])
+            }
             IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
                 let indexer_config = self.index_mapper.indexer_config();
                 let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config);
@@ -1487,23 +1486,22 @@ impl IndexScheduler {
     }
 }
 
-fn delete_document_by_filter(filter: &serde_json::Value, index: Index) -> Result<u64> {
+fn delete_document_by_filter<'a>(
+    wtxn: &mut RwTxn<'a, '_>,
+    filter: &serde_json::Value,
+    index: &'a Index,
+) -> Result<u64> {
     let filter = Filter::from_json(filter)?;
     Ok(if let Some(filter) = filter {
-        let mut wtxn = index.write_txn()?;
-
-        let candidates = filter.evaluate(&wtxn, &index).map_err(|err| match err {
+        let candidates = filter.evaluate(wtxn, index).map_err(|err| match err {
             milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
                 Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
             }
             e => e.into(),
         })?;
-        let mut delete_operation = DeleteDocuments::new(&mut wtxn, &index)?;
+        let mut delete_operation = DeleteDocuments::new(wtxn, index)?;
         delete_operation.delete_documents(&candidates);
-        let deleted_documents =
-            delete_operation.execute().map(|result| result.deleted_documents)?;
-        wtxn.commit()?;
-        deleted_documents
+        delete_operation.execute().map(|result| result.deleted_documents)?
     } else {
         0
     })
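
This signature change is the heart of the fix: `delete_document_by_filter` no longer opens and commits a private write transaction but borrows the batch's `index_wtxn`, so the deletion commits, or rolls back, together with everything else in the batch. A sketch of the new calling convention, assuming stand-in types (the real code uses `heed::RwTxn` and `milli::Index`):

    // Stand-ins for heed::RwTxn and milli::Index; names are assumptions here.
    struct RwTxn;
    struct Index;

    impl RwTxn {
        fn commit(self) -> Result<(), String> {
            Ok(())
        }
    }

    // After the change, the helper borrows the caller's transaction instead
    // of opening (and committing) its own.
    fn delete_document_by_filter(_wtxn: &mut RwTxn, filter: &str, _index: &Index) -> Result<u64, String> {
        // ... evaluate the filter, collect candidates, delete them ...
        if filter.is_empty() { Ok(0) } else { Ok(2) }
    }

    fn main() -> Result<(), String> {
        let index = Index;
        let mut wtxn = RwTxn; // the scheduler opens this once per batch
        let deleted = delete_document_by_filter(&mut wtxn, "color = blue", &index)?;
        // ... further operations of the same batch reuse `wtxn` ...
        wtxn.commit()?; // one commit covers the whole batch, or nothing is applied
        println!("{deleted} documents deleted");
        Ok(())
    }
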
Index mapper:

@@ -223,7 +223,9 @@ impl IndexMap {
         enable_mdb_writemap: bool,
         map_size_growth: usize,
     ) {
-        let Some(index) = self.available.remove(uuid) else { return; };
+        let Some(index) = self.available.remove(uuid) else {
+            return;
+        };
         self.close(*uuid, index, enable_mdb_writemap, map_size_growth);
     }
 
Key expiration date parsing:

@@ -147,9 +147,7 @@ impl Key {
 fn parse_expiration_date(
     string: Option<String>,
 ) -> std::result::Result<Option<OffsetDateTime>, ParseOffsetDateTimeError> {
-    let Some(string) = string else {
-        return Ok(None)
-    };
+    let Some(string) = string else { return Ok(None) };
     let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) {
         datetime
     } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
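
This hunk, the `IndexMap` one above it, and several milli hunks below look like mechanical reformatting of `let ... else` statements rather than behavior changes: short divergent bodies are inlined onto one line, and long binding expressions break before `else`. A small illustration of both shapes, assuming a rustfmt pass drove these edits:

    fn first_char(input: Option<String>) -> Option<char> {
        // Short form: the whole let-else fits on one line.
        let Some(input) = input else { return None };

        // Long form: the scrutinee is too wide, so the expression is broken
        // and `else` starts its own line, as in the hunks above and below.
        let Some(first) = input
            .chars()
            .next()
        else {
            return None;
        };
        Some(first)
    }

    fn main() {
        assert_eq!(first_char(Some("abc".to_string())), Some('a'));
        assert_eq!(first_char(None), None);
    }
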
Integration test, delete_document_by_filter:

@@ -154,6 +154,19 @@ async fn delete_document_by_filter() {
         )
         .await;
     index.wait_task(1).await;
+
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 4,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 3,
+        "id": 4
+      }
+    }
+    "###);
+
     let (response, code) =
         index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
     snapshot!(code, @"202 Accepted");
@@ -188,6 +201,18 @@ async fn delete_document_by_filter() {
       }
     "###);
 
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 2,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 1,
+        "id": 2
+      }
+    }
+    "###);
+
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
@@ -241,6 +266,18 @@ async fn delete_document_by_filter() {
       }
     "###);
 
+    let (stats, _) = index.stats().await;
+    snapshot!(json_string!(stats), @r###"
+    {
+      "numberOfDocuments": 1,
+      "isIndexing": false,
+      "fieldDistribution": {
+        "color": 1,
+        "id": 1
+      }
+    }
+    "###);
+
     let (documents, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(documents), @r###"
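
The three inserted blocks strengthen the test by snapshotting `/stats` after indexing and after each deletion, pinning the document count as it shrinks from 4 to 2 to 1. As a hedged illustration of what the asserted `fieldDistribution` reports (for each field, the number of documents containing it; recomputed naively here, whereas the real figures come from index metadata):

    use std::collections::BTreeMap;

    fn field_distribution<'a>(docs: &'a [Vec<&'a str>]) -> BTreeMap<&'a str, u64> {
        let mut distribution = BTreeMap::new();
        for doc in docs {
            for field in doc {
                *distribution.entry(*field).or_insert(0) += 1;
            }
        }
        distribution
    }

    fn main() {
        // Mirrors the snapshot taken after the `color = blue` deletion:
        // two documents remain, only one of which still has a `color` field.
        let docs = vec![vec!["id", "color"], vec!["id"]];
        let distribution = field_distribution(&docs);
        assert_eq!(distribution["id"], 2);
        assert_eq!(distribution["color"], 1);
    }
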
milli search internals:

@@ -125,7 +125,12 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
             continue;
         }
 
-        let Some(next_bucket) = ranking_rules[cur_ranking_rule_index].next_bucket(ctx, logger, &ranking_rule_universes[cur_ranking_rule_index])? else {
+        let Some(next_bucket) = ranking_rules[cur_ranking_rule_index].next_bucket(
+            ctx,
+            logger,
+            &ranking_rule_universes[cur_ranking_rule_index],
+        )?
+        else {
             back!();
             continue;
         };
@@ -193,9 +193,10 @@ impl<'ctx, G: RankingRuleGraphTrait> RankingRule<'ctx, QueryGraph> for GraphBase
             .all_costs
             .get(state.graph.query_graph.root_node)
             .iter()
-            .find(|c| **c >= state.cur_cost) else {
+            .find(|c| **c >= state.cur_cost)
+        else {
             self.state = None;
             return Ok(None);
         };
         state.cur_cost = cost + 1;
 
@@ -80,7 +80,9 @@ impl MatchingWords {
         let word = self.word_interner.get(*word);
         // if the word is a prefix we match using starts_with.
         if located_words.is_prefix && token.lemma().starts_with(word) {
-            let Some((char_index, c)) = word.char_indices().take(located_words.original_char_count).last() else {
+            let Some((char_index, c)) =
+                word.char_indices().take(located_words.original_char_count).last()
+            else {
                 continue;
             };
             let prefix_length = char_index + c.len_utf8();
@@ -176,9 +176,7 @@ impl QueryTermSubset {
 
     pub fn use_prefix_db(&self, ctx: &SearchContext) -> Option<Word> {
         let original = ctx.term_interner.get(self.original);
-        let Some(use_prefix_db) = original.zero_typo.use_prefix_db else {
-            return None
-        };
+        let Some(use_prefix_db) = original.zero_typo.use_prefix_db else { return None };
         let word = match &self.zero_typo_subset {
             NTypoTermSubset::All => Some(use_prefix_db),
             NTypoTermSubset::Subset { words, phrases: _ } => {
@@ -264,13 +262,15 @@ impl QueryTermSubset {
 
         match &self.one_typo_subset {
             NTypoTermSubset::All => {
-                let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo else {
+                let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo
+                else {
                     panic!()
                 };
                 result.extend(one_typo.iter().copied().map(Word::Derived))
             }
             NTypoTermSubset::Subset { words, phrases: _ } => {
-                let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo else {
+                let Lazy::Init(OneTypoTerm { split_words: _, one_typo }) = &original.one_typo
+                else {
                     panic!()
                 };
                 result.extend(one_typo.intersection(words).copied().map(Word::Derived));
@@ -280,15 +280,11 @@ impl QueryTermSubset {
 
         match &self.two_typo_subset {
             NTypoTermSubset::All => {
-                let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else {
-                    panic!()
-                };
+                let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else { panic!() };
                 result.extend(two_typos.iter().copied().map(Word::Derived));
             }
             NTypoTermSubset::Subset { words, phrases: _ } => {
-                let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else {
-                    panic!()
-                };
+                let Lazy::Init(TwoTypoTerm { two_typos }) = &original.two_typo else { panic!() };
                 result.extend(two_typos.intersection(words).copied().map(Word::Derived));
             }
             NTypoTermSubset::Nothing => {}
@@ -312,13 +308,15 @@ impl QueryTermSubset {
 
         match &self.one_typo_subset {
             NTypoTermSubset::All => {
-                let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo else {
+                let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo
+                else {
                     panic!();
                 };
                 result.extend(split_words.iter().copied());
             }
             NTypoTermSubset::Subset { phrases, .. } => {
-                let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo else {
+                let Lazy::Init(OneTypoTerm { split_words, one_typo: _ }) = &original.one_typo
+                else {
                     panic!();
                 };
                 if let Some(split_words) = split_words {
@@ -18,7 +18,7 @@ pub fn build_edges(
         return Ok(vec![(
             (right_ngram_length - 1) as u32,
             conditions_interner.insert(ProximityCondition::Term { term: right_term.clone() }),
-        )])
+        )]);
     };
 
     if left_term.positions.end() + 1 != *right_term.positions.start() {
@@ -2045,10 +2045,11 @@ mod tests {
             "branch_id_number": 0
         }]};
 
-        let Err(Error::UserError(UserError::MultiplePrimaryKeyCandidatesFound {
-            candidates
-        })) =
-            index.add_documents(doc_multiple_ids) else { panic!("Expected Error::UserError(MultiplePrimaryKeyCandidatesFound)") };
+        let Err(Error::UserError(UserError::MultiplePrimaryKeyCandidatesFound { candidates })) =
+            index.add_documents(doc_multiple_ids)
+        else {
+            panic!("Expected Error::UserError(MultiplePrimaryKeyCandidatesFound)")
+        };
 
         assert_eq!(candidates, vec![S("id"), S("project_id"), S("public_uid"),]);
 
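
For context on what this reformatted test asserts: when no primary key is declared and several top-level fields look like identifiers, document addition fails with `MultiplePrimaryKeyCandidatesFound`, listing the candidates instead of guessing. A simplified sketch of such a detection; the heuristic below is an illustrative assumption, not milli's exact rule:

    fn primary_key_candidates(fields: &[&str]) -> Vec<String> {
        fields
            .iter()
            .filter(|field| field.to_lowercase().ends_with("id")) // simplified heuristic
            .map(|field| field.to_string())
            .collect()
    }

    fn main() {
        // Field names taken from the test fixture above.
        let candidates = primary_key_candidates(&["id", "project_id", "public_uid", "branch_id_number"]);
        assert_eq!(candidates, vec!["id", "project_id", "public_uid"]);
        if candidates.len() > 1 {
            println!("error: multiple primary key candidates found: {candidates:?}");
        }
    }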